From 61a5d3bf745258e06cc053ae06e66da0b4cfc120 Mon Sep 17 00:00:00 2001
From: dreamsyntax
Date: Sun, 17 Oct 2021 00:18:48 -0700
Subject: [PATCH 001/237] DolphinQt/MenuBar: add "reset ignore panic handler" button

---
 Source/Core/DolphinQt/MenuBar.cpp | 8 ++++++++
 Source/Core/DolphinQt/MenuBar.h   | 1 +
 2 files changed, 9 insertions(+)

diff --git a/Source/Core/DolphinQt/MenuBar.cpp b/Source/Core/DolphinQt/MenuBar.cpp
index 48a18dbbb078..87ec7091ac20 100644
--- a/Source/Core/DolphinQt/MenuBar.cpp
+++ b/Source/Core/DolphinQt/MenuBar.cpp
@@ -165,6 +165,7 @@ void MenuBar::OnDebugModeToggled(bool enabled)
   // Options
   m_boot_to_pause->setVisible(enabled);
   m_automatic_start->setVisible(enabled);
+  m_reset_ignore_panic_handler->setVisible(enabled);
   m_change_font->setVisible(enabled);
 
   // View
@@ -543,6 +544,13 @@ void MenuBar::AddOptionsMenu()
   connect(m_automatic_start, &QAction::toggled, this,
           [](bool enable) { SConfig::GetInstance().bAutomaticStart = enable; });
 
+  m_reset_ignore_panic_handler = options_menu->addAction(tr("Reset Ignore Panic Handler"));
+
+  connect(m_reset_ignore_panic_handler, &QAction::triggered, this, []() {
+    if (Config::Get(Config::MAIN_USE_PANIC_HANDLERS))
+      Common::SetEnableAlert(true);
+  });
+
   m_change_font = options_menu->addAction(tr("&Font..."), this, &MenuBar::ChangeDebugFont);
 }
 
diff --git a/Source/Core/DolphinQt/MenuBar.h b/Source/Core/DolphinQt/MenuBar.h
index d745f52e9bfa..0841bdff0709 100644
--- a/Source/Core/DolphinQt/MenuBar.h
+++ b/Source/Core/DolphinQt/MenuBar.h
@@ -234,6 +234,7 @@ class MenuBar final : public QMenuBar
   // Options
   QAction* m_boot_to_pause;
   QAction* m_automatic_start;
+  QAction* m_reset_ignore_panic_handler;
   QAction* m_change_font;
 
   QAction* m_controllers_action;

From 3175e38a2eaa387de29c67d9198a234bc17e55fe Mon Sep 17 00:00:00 2001
From: Shawn Hoffman
Date: Sun, 16 Jan 2022 22:34:39 -0800
Subject: [PATCH 002/237] GXPipelineUid: remove explicit shallow copy where it's the default

---
 Source/Core/VideoCommon/GXPipelineTypes.h | 35 -----------------------
 1 file changed, 35 deletions(-)

diff --git a/Source/Core/VideoCommon/GXPipelineTypes.h b/Source/Core/VideoCommon/GXPipelineTypes.h
index ca3fb897010e..b448dc107f89 100644
--- a/Source/Core/VideoCommon/GXPipelineTypes.h
+++ b/Source/Core/VideoCommon/GXPipelineTypes.h
@@ -35,24 +35,6 @@ struct GXPipelineUid
   // and this map lookup can happen every draw call. However, as using memcmp() will also compare
   // any padding bytes, we have to ensure these are zeroed out.
   GXPipelineUid() { std::memset(static_cast<void*>(this), 0, sizeof(*this)); }
-#ifdef _MSC_VER
-#pragma warning(push)
-// Disable warning for uninitialized member variables, as MSVC doesn't recognise that memcpy
-// performs this initialization.
-#pragma warning(disable : 26495)
-#endif
-  GXPipelineUid(const GXPipelineUid& rhs)
-  {
-    std::memcpy(static_cast<void*>(this), &rhs, sizeof(*this));
-  }
-#ifdef _MSC_VER
-#pragma warning(pop)
-#endif
-  GXPipelineUid& operator=(const GXPipelineUid& rhs)
-  {
-    std::memcpy(static_cast<void*>(this), &rhs, sizeof(*this));
-    return *this;
-  }
   bool operator<(const GXPipelineUid& rhs) const
   {
     return std::memcmp(this, &rhs, sizeof(*this)) < 0;
@@ -74,23 +56,6 @@ struct GXUberPipelineUid
   BlendingState blending_state;
 
   GXUberPipelineUid() { std::memset(static_cast<void*>(this), 0, sizeof(*this)); }
-#ifdef _MSC_VER
-#pragma warning(push)
-// Disable warning for uninitialized member variables
-#pragma warning(disable : 26495)
-#endif
-  GXUberPipelineUid(const GXUberPipelineUid& rhs)
-  {
-    std::memcpy(static_cast<void*>(this), &rhs, sizeof(*this));
-  }
-#ifdef _MSC_VER
-#pragma warning(pop)
-#endif
-  GXUberPipelineUid& operator=(const GXUberPipelineUid& rhs)
-  {
-    std::memcpy(static_cast<void*>(this), &rhs, sizeof(*this));
-    return *this;
-  }
   bool operator<(const GXUberPipelineUid& rhs) const
   {
     return std::memcmp(this, &rhs, sizeof(*this)) < 0;

From a3f5d09a5429464a127f9adfab40ce5b7ea5ff25 Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sat, 22 Jan 2022 16:00:29 +0100
Subject: [PATCH 003/237] Don't create GBA saves dir when building without mGBA

Currently, disabling mGBA when building gets rid of the ability to
change the GBA saves directory in DolphinQt, but it doesn't actually
get rid of Dolphin loading and storing the setting and creating the
folder. If the setting is set to a path you don't want to use (perhaps
you are trying to turn Dolphin portable), this is annoying. To avoid
accidentally making mistakes like this in the future, I'm gating the
existence of the setting behind an ifdef.
---
 Source/Core/Core/Config/MainSettings.cpp               | 2 ++
 Source/Core/Core/Config/MainSettings.h                 | 2 ++
 Source/Core/Core/ConfigLoaders/NetPlayConfigLoader.cpp | 2 ++
 Source/Core/UICommon/UICommon.cpp                      | 2 ++
 4 files changed, 8 insertions(+)

diff --git a/Source/Core/Core/Config/MainSettings.cpp b/Source/Core/Core/Config/MainSettings.cpp
index 650592f1da60..4e6bb54ec727 100644
--- a/Source/Core/Core/Config/MainSettings.cpp
+++ b/Source/Core/Core/Config/MainSettings.cpp
@@ -307,6 +307,7 @@ void SetIsoPaths(const std::vector<std::string>& paths)
 
 // Main.GBA
 
+#ifdef HAS_LIBMGBA
 const Info<std::string> MAIN_GBA_BIOS_PATH{{System::Main, "GBA", "BIOS"}, ""};
 const std::array<Info<std::string>, 4> MAIN_GBA_ROM_PATHS{
     Info<std::string>{{System::Main, "GBA", "Rom1"}, ""},
@@ -316,6 +317,7 @@
 const Info<std::string> MAIN_GBA_SAVES_PATH{{System::Main, "GBA", "SavesPath"}, ""};
 const Info<bool> MAIN_GBA_SAVES_IN_ROM_PATH{{System::Main, "GBA", "SavesInRomPath"}, false};
 const Info<bool> MAIN_GBA_THREADS{{System::Main, "GBA", "Threads"}, true};
+#endif
 
 // Main.Network
 
diff --git a/Source/Core/Core/Config/MainSettings.h b/Source/Core/Core/Config/MainSettings.h
index 8a2b8be500f9..c97a32e97a28 100644
--- a/Source/Core/Core/Config/MainSettings.h
+++ b/Source/Core/Core/Config/MainSettings.h
@@ -179,11 +179,13 @@ void SetIsoPaths(const std::vector<std::string>& paths);
 
 // Main.GBA
 
+#ifdef HAS_LIBMGBA
 extern const Info<std::string> MAIN_GBA_BIOS_PATH;
 extern const std::array<Info<std::string>, 4> MAIN_GBA_ROM_PATHS;
 extern const Info<std::string> MAIN_GBA_SAVES_PATH;
 extern const Info<bool> MAIN_GBA_SAVES_IN_ROM_PATH;
 extern const Info<bool> MAIN_GBA_THREADS;
+#endif
 
 // Main.Network
 
diff --git a/Source/Core/Core/ConfigLoaders/NetPlayConfigLoader.cpp b/Source/Core/Core/ConfigLoaders/NetPlayConfigLoader.cpp
index 92dd1a9cfca2..7e7d4f949d52 100644
--- a/Source/Core/Core/ConfigLoaders/NetPlayConfigLoader.cpp
+++ b/Source/Core/Core/ConfigLoaders/NetPlayConfigLoader.cpp
@@ -138,10 +138,12 @@ class NetPlayConfigLayerLoader final : public Config::ConfigLayerLoader
       layer->Set(Config::SESSION_GCI_FOLDER_CURRENT_GAME_ONLY, true);
     }
 
+#ifdef HAS_LIBMGBA
     for (size_t i = 0; i < m_settings.m_GBARomPaths.size(); ++i)
     {
       layer->Set(Config::MAIN_GBA_ROM_PATHS[i], m_settings.m_GBARomPaths[i]);
     }
+#endif
 
     // Check To Override Client's Cheat Codes
     if (m_settings.m_SyncCodes && !m_settings.m_IsHosting)
diff --git a/Source/Core/UICommon/UICommon.cpp b/Source/Core/UICommon/UICommon.cpp
index a5f072125139..1e6aa4bbb9c5 100644
--- a/Source/Core/UICommon/UICommon.cpp
+++ b/Source/Core/UICommon/UICommon.cpp
@@ -91,9 +91,11 @@ static void InitCustomPaths()
   CreateResourcePackPath(Config::Get(Config::MAIN_RESOURCEPACK_PATH));
   CreateWFSPath(Config::Get(Config::MAIN_WFS_PATH));
   File::SetUserPath(F_WIISDCARD_IDX, Config::Get(Config::MAIN_SD_PATH));
+#ifdef HAS_LIBMGBA
   File::SetUserPath(F_GBABIOS_IDX, Config::Get(Config::MAIN_GBA_BIOS_PATH));
   File::SetUserPath(D_GBASAVES_IDX, Config::Get(Config::MAIN_GBA_SAVES_PATH));
   File::CreateFullPath(File::GetUserPath(D_GBASAVES_IDX));
+#endif
 }
 
 void Init()

From d3ae1bd415f328ebd679828a79dca219856c1e39 Mon Sep 17 00:00:00 2001
From: Dentomologist
Date: Tue, 25 Jan 2022 14:15:18 -0800
Subject: [PATCH 004/237] [Android] Fix unused variable warning

Add [[maybe_unused]] attribute to ConsoleListener's m_use_color
---
 Source/Core/Common/Logging/ConsoleListener.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Source/Core/Common/Logging/ConsoleListener.h b/Source/Core/Common/Logging/ConsoleListener.h
index 2c5e744751fb..3901f29580a8 100644
--- a/Source/Core/Common/Logging/ConsoleListener.h
+++ b/Source/Core/Common/Logging/ConsoleListener.h
@@ -14,5 +14,5 @@ class ConsoleListener : public Common::Log::LogListener
   void Log(Common::Log::LogLevel level, const char* text) override;
 
 private:
-  bool m_use_color = false;
+  [[maybe_unused]] bool m_use_color = false;
 };

From 8e3dbe9671afa631e7a64496f5bfcb2a8206f152 Mon Sep 17 00:00:00 2001
From: iwubcode
Date: Sat, 29 Jan 2022 00:47:27 -0600
Subject: [PATCH 005/237] Externals / Vulkan: update Vulkan headers to v1.3.204.
Fix default present mode in Vulkan swap chain --- Externals/Vulkan/Include/vulkan/vk_icd.h | 78 +- Externals/Vulkan/Include/vulkan/vk_layer.h | 16 +- Externals/Vulkan/Include/vulkan/vk_platform.h | 20 +- Externals/Vulkan/Include/vulkan/vulkan.h | 30 +- Externals/Vulkan/Include/vulkan/vulkan.hpp | 79600 ++--------- .../Vulkan/Include/vulkan/vulkan_android.h | 29 +- Externals/Vulkan/Include/vulkan/vulkan_beta.h | 933 + Externals/Vulkan/Include/vulkan/vulkan_core.h | 13968 +- .../Vulkan/Include/vulkan/vulkan_directfb.h | 54 + .../Vulkan/Include/vulkan/vulkan_enums.hpp | 18755 +++ .../Vulkan/Include/vulkan/vulkan_fuchsia.h | 225 +- .../Vulkan/Include/vulkan/vulkan_funcs.hpp | 20957 +++ Externals/Vulkan/Include/vulkan/vulkan_ggp.h | 14 +- .../Vulkan/Include/vulkan/vulkan_handles.hpp | 14971 +++ .../Vulkan/Include/vulkan/vulkan_hash.hpp | 13191 ++ Externals/Vulkan/Include/vulkan/vulkan_ios.h | 16 +- .../Vulkan/Include/vulkan/vulkan_macos.h | 16 +- .../Vulkan/Include/vulkan/vulkan_metal.h | 14 +- .../Vulkan/Include/vulkan/vulkan_raii.hpp | 18669 +++ .../Vulkan/Include/vulkan/vulkan_screen.h | 54 + .../Vulkan/Include/vulkan/vulkan_structs.hpp | 102190 +++++++++++++++ Externals/Vulkan/Include/vulkan/vulkan_vi.h | 14 +- .../Vulkan/Include/vulkan/vulkan_wayland.h | 14 +- .../Vulkan/Include/vulkan/vulkan_win32.h | 17 +- Externals/Vulkan/Include/vulkan/vulkan_xcb.h | 14 +- Externals/Vulkan/Include/vulkan/vulkan_xlib.h | 14 +- .../Include/vulkan/vulkan_xlib_xrandr.h | 14 +- .../Core/VideoBackends/Vulkan/VKSwapChain.h | 2 +- 28 files changed, 210929 insertions(+), 72960 deletions(-) create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_beta.h create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_directfb.h create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_enums.hpp create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_funcs.hpp create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_handles.hpp create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_hash.hpp create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_raii.hpp create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_screen.h create mode 100644 Externals/Vulkan/Include/vulkan/vulkan_structs.hpp diff --git a/Externals/Vulkan/Include/vulkan/vk_icd.h b/Externals/Vulkan/Include/vulkan/vk_icd.h index a2d960a6324f..41989ee35474 100644 --- a/Externals/Vulkan/Include/vulkan/vk_icd.h +++ b/Externals/Vulkan/Include/vulkan/vk_icd.h @@ -33,7 +33,7 @@ // Version 2 - Add Loader/ICD Interface version negotiation // via vk_icdNegotiateLoaderICDInterfaceVersion. // Version 3 - Add ICD creation/destruction of KHR_surface objects. -// Version 4 - Add unknown physical device extension qyering via +// Version 4 - Add unknown physical device extension querying via // vk_icdGetPhysicalDeviceProcAddr. // Version 5 - Tells ICDs that the loader is now paying attention to the // application version of Vulkan passed into the ApplicationInfo @@ -41,17 +41,45 @@ // that if the loader is older, it should automatically fail a // call for any API version > 1.0. Otherwise, the loader will // manually determine if it can support the expected version. -#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5 +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. 
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6 #define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 #define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 -typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); // This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this // file directly, it won't be found. #ifndef PFN_GetPhysicalDeviceProcAddr typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); #endif +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance isntance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + /* * The ICD must reserve space for a pointer for the loader's dispatch * table, at the start of . 
@@ -89,7 +117,12 @@ typedef enum { VK_ICD_WSI_PLATFORM_MACOS, VK_ICD_WSI_PLATFORM_IOS, VK_ICD_WSI_PLATFORM_DISPLAY, - VK_ICD_WSI_PLATFORM_HEADLESS + VK_ICD_WSI_PLATFORM_HEADLESS, + VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, + VK_ICD_WSI_PLATFORM_GGP, + VK_ICD_WSI_PLATFORM_SCREEN, } VkIcdWsiPlatform; typedef struct { @@ -136,6 +169,14 @@ typedef struct { } VkIcdSurfaceXlib; #endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + #ifdef VK_USE_PLATFORM_ANDROID_KHR typedef struct { VkIcdSurfaceBase base; @@ -157,6 +198,13 @@ typedef struct { } VkIcdSurfaceIOS; #endif // VK_USE_PLATFORM_IOS_MVK +#ifdef VK_USE_PLATFORM_GGP +typedef struct { + VkIcdSurfaceBase base; + GgpStreamDescriptor streamDescriptor; +} VkIcdSurfaceGgp; +#endif // VK_USE_PLATFORM_GGP + typedef struct { VkIcdSurfaceBase base; VkDisplayModeKHR displayMode; @@ -172,4 +220,26 @@ typedef struct { VkIcdSurfaceBase base; } VkIcdSurfaceHeadless; +#ifdef VK_USE_PLATFORM_METAL_EXT +typedef struct { + VkIcdSurfaceBase base; + const CAMetalLayer *pLayer; +} VkIcdSurfaceMetal; +#endif // VK_USE_PLATFORM_METAL_EXT + +#ifdef VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +typedef struct { + VkIcdSurfaceBase base; + struct _screen_context *context; + struct _screen_window *window; +} VkIcdSurfaceScreen; +#endif // VK_USE_PLATFORM_SCREEN_QNX + #endif // VKICD_H diff --git a/Externals/Vulkan/Include/vulkan/vk_layer.h b/Externals/Vulkan/Include/vulkan/vk_layer.h index fa7652008976..0651870c70a4 100644 --- a/Externals/Vulkan/Include/vulkan/vk_layer.h +++ b/Externals/Vulkan/Include/vulkan/vk_layer.h @@ -83,7 +83,8 @@ typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); typedef enum VkLayerFunction_ { VK_LAYER_LINK_INFO = 0, VK_LOADER_DATA_CALLBACK = 1, - VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2 + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, } VkLayerFunction; typedef struct VkLayerInstanceLink_ { @@ -111,6 +112,12 @@ typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeastureFlagBits { + VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + typedef struct { VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO const void *pNext; @@ -119,9 +126,10 @@ typedef struct { VkLayerInstanceLink *pLayerInfo; PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; struct { - PFN_vkLayerCreateDevice pfnLayerCreateDevice; - PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; - } layerDevice; + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; } u; } VkLayerInstanceCreateInfo; diff --git 
a/Externals/Vulkan/Include/vulkan/vk_platform.h b/Externals/Vulkan/Include/vulkan/vk_platform.h index 7289299240a3..3ff8c5d14671 100644 --- a/Externals/Vulkan/Include/vulkan/vk_platform.h +++ b/Externals/Vulkan/Include/vulkan/vk_platform.h @@ -2,19 +2,9 @@ // File: vk_platform.h // /* -** Copyright (c) 2014-2017 The Khronos Group Inc. +** Copyright 2014-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ @@ -52,7 +42,7 @@ extern "C" #define VKAPI_CALL __stdcall #define VKAPI_PTR VKAPI_CALL #elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7 - #error "Vulkan isn't supported for the 'armeabi' NDK ABI" + #error "Vulkan is not supported for the 'armeabi' NDK ABI" #elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE) // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat" // calling convention, i.e. float parameters are passed in registers. This @@ -68,7 +58,9 @@ extern "C" #define VKAPI_PTR #endif -#include +#if !defined(VK_NO_STDDEF_H) + #include +#endif // !defined(VK_NO_STDDEF_H) #if !defined(VK_NO_STDINT_H) #if defined(_MSC_VER) && (_MSC_VER < 1600) diff --git a/Externals/Vulkan/Include/vulkan/vulkan.h b/Externals/Vulkan/Include/vulkan/vulkan.h index 5f853f9fc8ee..004fa70952a4 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan.h +++ b/Externals/Vulkan/Include/vulkan/vulkan.h @@ -2,19 +2,9 @@ #define VULKAN_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ #include "vk_platform.h" @@ -71,6 +61,12 @@ #endif +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +#include +#include "vulkan_directfb.h" +#endif + + #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT #include #include @@ -83,4 +79,14 @@ #include "vulkan_ggp.h" #endif + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +#include +#include "vulkan_screen.h" +#endif + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#include "vulkan_beta.h" +#endif + #endif // VULKAN_H_ diff --git a/Externals/Vulkan/Include/vulkan/vulkan.hpp b/Externals/Vulkan/Include/vulkan/vulkan.hpp index ee748481230b..400bdbe94bba 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan.hpp +++ b/Externals/Vulkan/Include/vulkan/vulkan.hpp @@ -1,71253 +1,14823 @@ -// Copyright (c) 2015-2019 The Khronos Group Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ---- Exceptions to the Apache 2.0 License: ---- -// -// As an exception, if you use this Software to generate code and portions of -// this Software are embedded into the generated code as a result, you may -// redistribute such product without providing attribution as would otherwise -// be required by Sections 4(a), 4(b) and 4(d) of the License. -// -// In addition, if you combine or link code generated by this Software with -// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 -// ("`Combined Software`") and if a court of competent jurisdiction determines -// that the patent provision (Section 3), the indemnity provision (Section 9) -// or other Section of the License conflicts with the conditions of the -// applicable GPL or LGPL license, you may retroactively and prospectively -// choose to deem waived or otherwise exclude such Section(s) of the License, -// but only in their entirety and only with respect to the Combined Software. -// +// Copyright 2015-2022 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// // This header is generated from the Khronos Vulkan XML API Registry. #ifndef VULKAN_HPP #define VULKAN_HPP +#if defined( _MSVC_LANG ) +# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG +#else +# define VULKAN_HPP_CPLUSPLUS __cplusplus +#endif + +#if 201703L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 20 +#elif 201402L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 17 +#elif 201103L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 14 +#elif 199711L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 11 +#else +# error "vulkan.hpp needs at least c++ standard version 11" +#endif + #include #include #include #include #include +#include #include +#include #include #include #include #include #include +#if 17 <= VULKAN_HPP_CPP_VERSION +# include +#endif + +#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif +#else +# include +# include +#endif + +#if defined( VULKAN_HPP_NO_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS +# endif +# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +# define VULKAN_HPP_NO_UNION_CONSTRUCTORS +# endif +#endif + +#if defined( VULKAN_HPP_NO_SETTERS ) +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# define VULKAN_HPP_NO_STRUCT_SETTERS +# endif +# if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +# define VULKAN_HPP_NO_UNION_SETTERS +# endif +#endif + +#if !defined( VULKAN_HPP_ASSERT ) +# include +# define VULKAN_HPP_ASSERT assert +#endif + +#if !defined( VULKAN_HPP_ASSERT_ON_RESULT ) +# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT +#endif + +#if !defined( VULKAN_HPP_STATIC_ASSERT ) +# define VULKAN_HPP_STATIC_ASSERT static_assert +#endif + +#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) +# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 +#endif + +#if 
VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) +# include +# elif defined( _WIN32 ) +typedef struct HINSTANCE__ * HINSTANCE; +# if defined( _WIN64 ) +typedef int64_t( __stdcall * FARPROC )(); +# else +typedef int( __stdcall * FARPROC )(); +# endif +extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); +extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); +extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); +# endif +#endif + +#if !defined( __has_include ) +# define __has_include( x ) false +#endif -#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) -# include -# include +#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR ) +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +# include #endif -#if !defined(VULKAN_HPP_ASSERT) -# include -# define VULKAN_HPP_ASSERT assert +#if ( 201803 <= __cpp_lib_span ) +# define VULKAN_HPP_SUPPORT_SPAN +# include #endif -static_assert( VK_HEADER_VERSION == 121 , "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 204, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) -# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) -# define VULKAN_HPP_TYPESAFE_CONVERSION -# endif +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif #endif // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) // which is an expression in a constructor initializer list. -#if defined(major) - #undef major +#if defined( major ) +# undef major #endif -#if defined(minor) - #undef minor +#if defined( minor ) +# undef minor #endif // Windows defines MemoryBarrier which is deprecated and collides -// with the vk::MemoryBarrier struct. -#if defined(MemoryBarrier) - #undef MemoryBarrier +// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. 
+#if defined( MemoryBarrier ) +# undef MemoryBarrier #endif -#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS) -# if defined(__clang__) -# if __has_feature(cxx_unrestricted_unions) -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif # endif -# elif defined(__GNUC__) -# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) -# if 40600 <= GCC_VERSION -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +#endif + +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline # endif -# elif defined(_MSC_VER) -# if 1900 <= _MSC_VER -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +#endif + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if __cpp_constexpr >= 201304 +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 # endif -# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR +# define VULKAN_HPP_CONSTEXPR_14 +# define VULKAN_HPP_CONST_OR_CONSTEXPR const #endif -#if !defined(VULKAN_HPP_INLINE) -# if defined(__clang__) -# if __has_attribute(always_inline) -# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT # else -# define VULKAN_HPP_INLINE inline +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept +# else +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS +# endif # endif -# elif defined(__GNUC__) -# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ -# elif defined(_MSC_VER) -# define VULKAN_HPP_INLINE inline -# else -# define VULKAN_HPP_INLINE inline -# endif #endif -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) -# define VULKAN_HPP_TYPESAFE_EXPLICIT +#if 14 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] #else -# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +# define VULKAN_HPP_DEPRECATED( msg ) #endif -#if defined(_MSC_VER) && (_MSC_VER <= 1800) -# define VULKAN_HPP_CONSTEXPR -# define VULKAN_HPP_CONST_OR_CONSTEXPR const +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] +# else +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +# endif #else -# define VULKAN_HPP_CONSTEXPR constexpr -# define 
VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +# define VULKAN_HPP_NODISCARD +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS #endif -#if !defined(VULKAN_HPP_NAMESPACE) -#define VULKAN_HPP_NAMESPACE vk +#if !defined( VULKAN_HPP_NAMESPACE ) +# define VULKAN_HPP_NAMESPACE vk #endif -#define VULKAN_HPP_STRINGIFY2(text) #text -#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text) -#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE) +#define VULKAN_HPP_STRINGIFY2( text ) #text +#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) namespace VULKAN_HPP_NAMESPACE { -#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template class ArrayProxy { public: - VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) - : m_count(0) - , m_ptr(nullptr) + VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template ::value, int>::type = 0> + ArrayProxy( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) {} - ArrayProxy(T & ptr) - : m_count(1) - , m_ptr(&ptr) + template ::value, int>::type = 0> + ArrayProxy( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) {} - ArrayProxy(uint32_t count, T * ptr) - : m_count(count) - , m_ptr(ptr) +# if __GNUC__ >= 9 +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Winit-list-lifetime" +# endif + + ArrayProxy( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template - ArrayProxy(std::array::type, N> & data) - : m_count(N) - , m_ptr(data.data()) + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template - ArrayProxy(std::array::type, N> const& data) - : m_count(N) - , m_ptr(data.data()) + ArrayProxy( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template ::type>> - ArrayProxy(std::vector::type, Allocator> & data) - : m_count(static_cast(data.size())) - , m_ptr(data.data()) + template ::value, int>::type = 0> + ArrayProxy( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) {} - template ::type>> - ArrayProxy(std::vector::type, Allocator> const& data) - : m_count(static_cast(data.size())) - , m_ptr(data.data()) +# if __GNUC__ >= 9 +# pragma GCC diagnostic pop +# endif + + // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly + // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement. 
+ template ().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value>::type * = nullptr> + ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.data() ) {} - ArrayProxy(std::initializer_list const& data) - : m_count(static_cast(data.end() - data.begin())) - , m_ptr(data.begin()) + template ().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value>::type * = nullptr> + ArrayProxy( V & v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.data() ) {} - const T * begin() const + const T * begin() const VULKAN_HPP_NOEXCEPT { return m_ptr; } - const T * end() const + const T * end() const VULKAN_HPP_NOEXCEPT { return m_ptr + m_count; } - const T & front() const + const T & front() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); + VULKAN_HPP_ASSERT( m_count && m_ptr ); return *m_ptr; } - const T & back() const + const T & back() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT(m_count && m_ptr); - return *(m_ptr + m_count - 1); + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); } - bool empty() const + bool empty() const VULKAN_HPP_NOEXCEPT { - return (m_count == 0); + return ( m_count == 0 ); } - uint32_t size() const + uint32_t size() const VULKAN_HPP_NOEXCEPT { return m_count; } - T * data() const + T * data() const VULKAN_HPP_NOEXCEPT { return m_ptr; } private: - uint32_t m_count; - T * m_ptr; - }; -#endif - - template struct FlagTraits - { - enum { allFlags = 0 }; + uint32_t m_count; + T * m_ptr; }; - template - class Flags + template + class ArrayProxyNoTemporaries { public: - VULKAN_HPP_CONSTEXPR Flags() - : m_mask(0) + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_count( 0 ) + , m_ptr( nullptr ) + {} + + ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template + ArrayProxyNoTemporaries( V && value ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + : m_count( 1 ) + , m_ptr( &value ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( typename std::remove_const::type && value ) = delete; + + ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT + : m_count( count ) + , m_ptr( ptr ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list const && list ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const & list ) + VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> const && list ) = delete; + + ArrayProxyNoTemporaries( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + ArrayProxyNoTemporaries( std::initializer_list && list ) = delete; + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( 
std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( list.size() ) ) + , m_ptr( list.begin() ) + {} + + template ::value, int>::type = 0> + ArrayProxyNoTemporaries( std::initializer_list::type> && list ) = delete; + + // Any type with a .data() return type implicitly convertible to T*, and a // .size() return type implicitly + // convertible to size_t. + template ().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value>::type * = nullptr> + ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.data() ) + {} + + const T * begin() const VULKAN_HPP_NOEXCEPT { + return m_ptr; } - Flags(BitType bit) - : m_mask(static_cast(bit)) + const T * end() const VULKAN_HPP_NOEXCEPT { + return m_ptr + m_count; } - Flags(Flags const& rhs) - : m_mask(rhs.m_mask) + const T & front() const VULKAN_HPP_NOEXCEPT { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *m_ptr; } - explicit Flags(MaskType flags) - : m_mask(flags) + const T & back() const VULKAN_HPP_NOEXCEPT { + VULKAN_HPP_ASSERT( m_count && m_ptr ); + return *( m_ptr + m_count - 1 ); } - Flags & operator=(Flags const& rhs) + bool empty() const VULKAN_HPP_NOEXCEPT { - m_mask = rhs.m_mask; - return *this; + return ( m_count == 0 ); } - Flags & operator|=(Flags const& rhs) + uint32_t size() const VULKAN_HPP_NOEXCEPT { - m_mask |= rhs.m_mask; - return *this; + return m_count; } - Flags & operator&=(Flags const& rhs) + T * data() const VULKAN_HPP_NOEXCEPT { - m_mask &= rhs.m_mask; - return *this; + return m_ptr; } - Flags & operator^=(Flags const& rhs) + private: + uint32_t m_count; + T * m_ptr; + }; +#endif + + template + class ArrayWrapper1D : public std::array + { + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array() {} + + VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array const & data ) VULKAN_HPP_NOEXCEPT : std::array( data ) + {} + +#if ( VK_USE_64_BIT_PTR_DEFINES == 0 ) + // on 32 bit compiles, needs overloads on index type int to resolve ambiguities + VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT { - m_mask ^= rhs.m_mask; - return *this; + return std::array::operator[]( index ); } - Flags operator|(Flags const& rhs) const + T & operator[]( int index ) VULKAN_HPP_NOEXCEPT { - Flags result(*this); - result |= rhs; - return result; + return std::array::operator[]( index ); } +#endif - Flags operator&(Flags const& rhs) const + operator T const *() const VULKAN_HPP_NOEXCEPT { - Flags result(*this); - result &= rhs; - return result; + return this->data(); } - Flags operator^(Flags const& rhs) const + operator T *() VULKAN_HPP_NOEXCEPT { - Flags result(*this); - result ^= rhs; - return result; + return this->data(); } - bool operator!() const + template ::value, int>::type = 0> + operator std::string() const { - return !m_mask; + return std::string( this->data() ); } - Flags operator~() const +#if 17 <= VULKAN_HPP_CPP_VERSION + template ::value, int>::type = 0> + operator std::string_view() const { - Flags result(*this); - result.m_mask ^= FlagTraits::allFlags; - return result; + return std::string_view( this->data() ); } +#endif - bool operator==(Flags const& rhs) const +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, int>::type = 0> + std::strong_ordering operator<=>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_mask == rhs.m_mask; + return *static_cast const *>( this ) <=> *static_cast const *>( &rhs ); + } +#else + template ::value, 
int>::type = 0> + bool operator<( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) < *static_cast const *>( &rhs ); } - bool operator!=(Flags const& rhs) const + template ::value, int>::type = 0> + bool operator<=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_mask != rhs.m_mask; + return *static_cast const *>( this ) <= *static_cast const *>( &rhs ); } - explicit operator bool() const + template ::value, int>::type = 0> + bool operator>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !!m_mask; + return *static_cast const *>( this ) > *static_cast const *>( &rhs ); } - explicit operator MaskType() const + template ::value, int>::type = 0> + bool operator>=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_mask; + return *static_cast const *>( this ) >= *static_cast const *>( &rhs ); } +#endif - private: - MaskType m_mask; + template ::value, int>::type = 0> + bool operator==( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) == *static_cast const *>( &rhs ); + } + + template ::value, int>::type = 0> + bool operator!=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *static_cast const *>( this ) != *static_cast const *>( &rhs ); + } }; - template - Flags operator|(BitType bit, Flags const& flags) + // specialization of relational operators between std::string and arrays of chars + template + bool operator<( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { - return flags | bit; + return lhs < rhs.data(); } - template - Flags operator&(BitType bit, Flags const& flags) + template + bool operator<=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { - return flags & bit; + return lhs <= rhs.data(); } - template - Flags operator^(BitType bit, Flags const& flags) + template + bool operator>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { - return flags ^ bit; + return lhs > rhs.data(); } - template - class Optional + template + bool operator>=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { - public: - Optional(RefType & reference) { m_ptr = &reference; } - Optional(RefType * ptr) { m_ptr = ptr; } - Optional(std::nullptr_t) { m_ptr = nullptr; } - - operator RefType*() const { return m_ptr; } - RefType const* operator->() const { return m_ptr; } - explicit operator bool() const { return !!m_ptr; } - - private: - RefType *m_ptr; - }; - - template struct isStructureChainValid { enum { value = false }; }; + return lhs >= rhs.data(); + } - template - struct TypeList + template + bool operator==( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { - using list = P; - using last = T; - }; + return lhs == rhs.data(); + } - template - struct extendCheck + template + bool operator!=( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { - static const bool valid = isStructureChainValid::value || extendCheck::valid; - }; + return lhs != rhs.data(); + } - template - struct extendCheck,X> + template + class ArrayWrapper2D : public std::array, N> { - static const bool valid = isStructureChainValid::value; - }; + public: + VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array, N>() {} - template - struct extendCheck - { - static const bool valid = true; + VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array, N> const & data ) VULKAN_HPP_NOEXCEPT + : std::array, N>( 
*reinterpret_cast, N> const *>( &data ) ) + {} }; - template - class StructureChainElement + template + struct FlagTraits { - public: - explicit operator Element&() { return value; } - explicit operator const Element&() const { return value; } - private: - Element value; + enum + { + allFlags = 0 + }; }; - template - class StructureChain : private StructureChainElement... + template + class Flags { public: - StructureChain() - { - link(); - } - - StructureChain(StructureChain const &rhs) - { - linkAndCopy(rhs); - } + using MaskType = typename std::underlying_type::type; - StructureChain(StructureElements const &... elems) - { - linkAndCopyElements(elems...); - } + // constructors + VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {} - StructureChain& operator=(StructureChain const &rhs) - { - linkAndCopy(rhs); - return *this; - } + VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast( bit ) ) {} - template ClassType& get() { return static_cast(*this);} + VULKAN_HPP_CONSTEXPR Flags( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; - template - std::tuple get() - { - return std::tuple_cat( - std::make_tuple(get(),get()), - std::make_tuple(get()...) - ); - } + VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {} - private: - template - void link() + // relational operators +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Flags const & ) const = default; +#else + VULKAN_HPP_CONSTEXPR bool operator<( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); + return m_mask < rhs.m_mask; } - template - void link() + VULKAN_HPP_CONSTEXPR bool operator<=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x.pNext = &y; - link, Y, Z...>(); + return m_mask <= rhs.m_mask; } - template - void linkAndCopy(StructureChain const &rhs) + VULKAN_HPP_CONSTEXPR bool operator>( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - static_cast(*this) = static_cast(rhs); + return m_mask > rhs.m_mask; } - template - void linkAndCopy(StructureChain const &rhs) + VULKAN_HPP_CONSTEXPR bool operator>=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x = static_cast(rhs); - x.pNext = &y; - linkAndCopy, Y, Z...>(rhs); + return m_mask >= rhs.m_mask; } - template - void linkAndCopyElements(X const &xelem) + VULKAN_HPP_CONSTEXPR bool operator==( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - static_cast(*this) = xelem; + return m_mask == rhs.m_mask; } - template - void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... 
zelem) + VULKAN_HPP_CONSTEXPR bool operator!=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - static_assert(extendCheck::valid, "The structure chain is not valid!"); - X& x = static_cast(*this); - Y& y = static_cast(*this); - x = xelem; - x.pNext = &y; - linkAndCopyElements, Y, Z...>(yelem, zelem...); + return m_mask != rhs.m_mask; } - }; - -#if !defined(VULKAN_HPP_NO_SMART_HANDLE) - template class UniqueHandleTraits; - - template - class UniqueHandle : public UniqueHandleTraits::deleter - { - private: - using Deleter = typename UniqueHandleTraits::deleter; - - public: - using element_type = Type; - - explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() ) - : Deleter( deleter) - , m_value( value ) - {} - - UniqueHandle( UniqueHandle const& ) = delete; - - UniqueHandle( UniqueHandle && other ) - : Deleter( std::move( static_cast( other ) ) ) - , m_value( other.release() ) - {} +#endif - ~UniqueHandle() + // logical operator + VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT { - if ( m_value ) this->destroy( m_value ); + return !m_mask; } - UniqueHandle & operator=( UniqueHandle const& ) = delete; - - UniqueHandle & operator=( UniqueHandle && other ) + // bitwise operators + VULKAN_HPP_CONSTEXPR Flags operator&( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - reset( other.release() ); - *static_cast(this) = std::move( static_cast(other) ); - return *this; + return Flags( m_mask & rhs.m_mask ); } - explicit operator bool() const + VULKAN_HPP_CONSTEXPR Flags operator|( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return m_value.operator bool(); + return Flags( m_mask | rhs.m_mask ); } - Type const* operator->() const + VULKAN_HPP_CONSTEXPR Flags operator^( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT { - return &m_value; + return Flags( m_mask ^ rhs.m_mask ); } - Type * operator->() + VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT { - return &m_value; + return Flags( m_mask ^ FlagTraits::allFlags ); } - Type const& operator*() const - { - return m_value; - } + // assignment operators + VULKAN_HPP_CONSTEXPR_14 Flags & operator=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; - Type & operator*() + VULKAN_HPP_CONSTEXPR_14 Flags & operator|=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { - return m_value; + m_mask |= rhs.m_mask; + return *this; } - const Type & get() const - { - return m_value; - } - - Type & get() + VULKAN_HPP_CONSTEXPR_14 Flags & operator&=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { - return m_value; + m_mask &= rhs.m_mask; + return *this; } - void reset( Type const& value = Type() ) + VULKAN_HPP_CONSTEXPR_14 Flags & operator^=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT { - if ( m_value != value ) - { - if ( m_value ) this->destroy( m_value ); - m_value = value; - } + m_mask ^= rhs.m_mask; + return *this; } - Type release() + // cast operators + explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT { - Type value = m_value; - m_value = nullptr; - return value; + return !!m_mask; } - void swap( UniqueHandle & rhs ) + explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT { - std::swap(m_value, rhs.m_value); - std::swap(static_cast(*this), static_cast(rhs)); + return m_mask; } +#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC ) + public: +#else private: - Type m_value; +#endif + MaskType m_mask; }; - template - VULKAN_HPP_INLINE std::vector uniqueToRaw(std::vector const& handles) +#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + // relational operators only needed 
for pre C++20 + template + VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { - std::vector newBuffer(handles.size()); - std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); }); - return newBuffer; + return flags.operator>( bit ); } - template - VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) + template + VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { - lhs.swap( rhs ); + return flags.operator>=( bit ); } -#endif -#if !defined(VK_NO_PROTOTYPES) - class DispatchLoaderStatic + template + VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT { - public: - VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const - { - return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); - } + return flags.operator<( bit ); + } - VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const - { - return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); - } + template + VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<=( bit ); + } - VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const - { - return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); - } + template + VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator==( bit ); + } - VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const - { - return ::vkEnumerateInstanceVersion( pApiVersion ); - } + template + VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator!=( bit ); + } +#endif - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const + // bitwise operators + template + VULKAN_HPP_CONSTEXPR Flags operator&( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator&( bit ); + } + + template + VULKAN_HPP_CONSTEXPR Flags operator|( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator|( bit ); + } + + template + VULKAN_HPP_CONSTEXPR Flags operator^( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator^( bit ); + } + + template + class Optional + { + public: + Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT { - return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + m_ptr = &reference; } - - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const + Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + m_ptr = ptr; } - - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const + Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + m_ptr = nullptr; } - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const + operator RefType *() const 
VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + return m_ptr; } - - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + RefType const * operator->() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + return m_ptr; } - - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const + explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + return !!m_ptr; } - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo ) const + private: + RefType * m_ptr; + }; + + template + struct StructExtends + { + enum { - return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } + value = false + }; + }; + + template + struct IsPartOfStructureChain + { + static const bool valid = false; + }; + + template + struct IsPartOfStructureChain + { + static const bool valid = std::is_same::value || IsPartOfStructureChain::valid; + }; + + template + struct StructureChainContains + { + static const bool value = + std::is_same>::type>::value || + StructureChainContains::value; + }; + + template + struct StructureChainContains<0, T, ChainElements...> + { + static const bool value = + std::is_same>::type>::value; + }; + + template + struct StructureChainValidation + { + using TestType = typename std::tuple_element>::type; + static const bool valid = + StructExtends>::type>::value && + ( TestType::allowDuplicate || !StructureChainContains::value ) && + StructureChainValidation::valid; + }; + + template + struct StructureChainValidation<0, ChainElements...> + { + static const bool valid = true; + }; - void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const + template + class StructureChain : public std::tuple + { + public: + StructureChain() VULKAN_HPP_NOEXCEPT { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); } - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const + StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple( rhs ) { - return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" 
); + link( &std::get<0>( *this ), + &std::get<0>( rhs ), + reinterpret_cast( &std::get<0>( *this ) ), + reinterpret_cast( &std::get<0>( rhs ) ) ); } - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const + StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT + : std::tuple( std::forward>( rhs ) ) { - return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link( &std::get<0>( *this ), + &std::get<0>( rhs ), + reinterpret_cast( &std::get<0>( *this ) ), + reinterpret_cast( &std::get<0>( rhs ) ) ); } - void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const + StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple( elems... ) { - return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + static_assert( StructureChainValidation::valid, + "The structure chain is not valid!" ); + link(); } - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const + StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + std::tuple::operator=( rhs ); + link( &std::get<0>( *this ), + &std::get<0>( rhs ), + reinterpret_cast( &std::get<0>( *this ) ), + reinterpret_cast( &std::get<0>( rhs ) ) ); + return *this; } - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const + StructureChain & operator=( StructureChain && rhs ) = delete; + + template >::type, size_t Which = 0> + T & get() VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + return std::get::value>( + static_cast &>( *this ) ); } - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const + template >::type, size_t Which = 0> + T const & get() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + return std::get::value>( + static_cast const &>( *this ) ); } - void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const + template + std::tuple get() VULKAN_HPP_NOEXCEPT { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + return std::tie( get(), get(), get()... ); } - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const + template + std::tuple get() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + return std::tie( get(), get(), get()... 
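// Illustrative sketch, not part of the patch: building a pNext chain with the
// StructureChain constructors above and reading an element back through get<>().
// Assumes <vulkan/vulkan.hpp>, a Vulkan 1.2 capable driver, and a valid
// vk::PhysicalDevice obtained elsewhere.
#include <vulkan/vulkan.hpp>

static bool supportsTimelineSemaphores( vk::PhysicalDevice physicalDevice )
{
  // The default constructor links PhysicalDeviceVulkan12Features into
  // PhysicalDeviceFeatures2::pNext (see link() above).
  vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;

  // One query fills both structures; the driver walks the linked pNext chain.
  physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );

  // get<T>() selects the element by type, as implemented in this hunk.
  return chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore == VK_TRUE;
}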
); } - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const + template + typename std::enable_if< + std::is_same>::type>::value && + ( Which == 0 ), + bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + return true; } - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + bool>::type + isLinked() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + static_assert( IsPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" ); + return isLinked( reinterpret_cast( &get() ) ); } - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + void>::type + relink() VULKAN_HPP_NOEXCEPT { - return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + static_assert( IsPartOfStructureChain::valid, + "Can't relink Structure that's not part of this StructureChain!" ); + auto pNext = reinterpret_cast( &get() ); + VULKAN_HPP_ASSERT( !isLinked( pNext ) ); + auto & headElement = std::get<0>( static_cast &>( *this ) ); + pNext->pNext = reinterpret_cast( headElement.pNext ); + headElement.pNext = pNext; } - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode ) const + template + typename std::enable_if< + !std::is_same>::type>::value || + ( Which != 0 ), + void>::type + unlink() VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + static_assert( IsPartOfStructureChain::valid, + "Can't unlink Structure that's not part of this StructureChain!" 
); + unlink( reinterpret_cast( &get() ) ); } - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const + private: + template + struct ChainElementIndex : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : ChainElementIndex + {}; + + template + struct ChainElementIndex::value, void>::type, + First, + Types...> : std::integral_constant + {}; + + bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + VkBaseInStructure const * elementPtr = reinterpret_cast( + &std::get<0>( static_cast const &>( *this ) ) ); + while ( elementPtr ) + { + if ( elementPtr->pNext == pNext ) + { + return true; + } + elementPtr = elementPtr->pNext; + } + return false; } - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + auto & x = std::get( static_cast &>( *this ) ); + x.pNext = &std::get( static_cast &>( *this ) ); + link(); } - void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const + template + typename std::enable_if::type link() VULKAN_HPP_NOEXCEPT + {} + + void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src ) { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + while ( src->pNext ) + { + std::ptrdiff_t offset = + reinterpret_cast( src->pNext ) - reinterpret_cast( srcBase ); + dst->pNext = reinterpret_cast( reinterpret_cast( dstBase ) + offset ); + dst = dst->pNext; + src = src->pNext; + } + dst->pNext = nullptr; } - void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const + void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + VkBaseOutStructure * elementPtr = + reinterpret_cast( &std::get<0>( static_cast &>( *this ) ) ); + while ( elementPtr && ( elementPtr->pNext != pNext ) ) + { + elementPtr = elementPtr->pNext; + } + if ( elementPtr ) + { + elementPtr->pNext = pNext->pNext; + } + else + { + VULKAN_HPP_ASSERT( false ); // fires, if the ClassType member has already been unlinked ! 
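// Illustrative sketch, not part of the patch: unlink() removes an element from the
// chain's pNext list without destroying it, relink() splices it back in directly
// behind the head element, and isLinked() reports the current state -- the helpers
// defined in this hunk. Assumes <vulkan/vulkan.hpp>; the chained types below are
// just one valid combination.
#include <vulkan/vulkan.hpp>
#include <cassert>

static void toggleChainElement()
{
  vk::StructureChain<vk::DeviceCreateInfo,
                     vk::PhysicalDeviceFeatures2,
                     vk::PhysicalDeviceVulkan12Features>
    chain;

  assert( chain.isLinked<vk::PhysicalDeviceVulkan12Features>() );

  // Temporarily drop the Vulkan 1.2 feature struct from the pNext chain ...
  chain.unlink<vk::PhysicalDeviceVulkan12Features>();
  assert( !chain.isLinked<vk::PhysicalDeviceVulkan12Features>() );

  // ... and splice it back in; relink() asserts that it is not already linked.
  chain.relink<vk::PhysicalDeviceVulkan12Features>();
  assert( chain.isLinked<vk::PhysicalDeviceVulkan12Features>() );
}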
+ } } + }; + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template + class UniqueHandleTraits; + + template + class UniqueHandle : public UniqueHandleTraits::deleter + { + private: + using Deleter = typename UniqueHandleTraits::deleter; + + public: + using element_type = Type; + + UniqueHandle() : Deleter(), m_value() {} + + explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : Deleter( deleter ) + , m_value( value ) + {} + + UniqueHandle( UniqueHandle const & ) = delete; + + UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + : Deleter( std::move( static_cast( other ) ) ) + , m_value( other.release() ) + {} - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const + ~UniqueHandle() VULKAN_HPP_NOEXCEPT { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + if ( m_value ) + { + this->destroy( m_value ); + } } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const + UniqueHandle & operator=( UniqueHandle const & ) = delete; + + UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + reset( other.release() ); + *static_cast( this ) = std::move( static_cast( other ) ); + return *this; } - void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const + explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + return m_value.operator bool(); } - void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const + Type const * operator->() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + return &m_value; } - void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + Type * operator->() VULKAN_HPP_NOEXCEPT { - return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + return &m_value; } - void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + Type const & operator*() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + return m_value; } - void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + Type & operator*() VULKAN_HPP_NOEXCEPT { - return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + return m_value; } - void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const + const Type & get() const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + return m_value; } - void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + Type & get() VULKAN_HPP_NOEXCEPT { - return ::vkCmdDraw( commandBuffer, vertexCount, 
instanceCount, firstVertex, firstInstance ); + return m_value; } - void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const + void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + if ( m_value != value ) + { + if ( m_value ) + { + this->destroy( m_value ); + } + m_value = value; + } } - void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + Type release() VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + Type value = m_value; + m_value = nullptr; + return value; } - void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + std::swap( m_value, rhs.m_value ); + std::swap( static_cast( *this ), static_cast( rhs ) ); } - void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + private: + Type m_value; + }; + + template + VULKAN_HPP_INLINE std::vector + uniqueToRaw( std::vector const & handles ) + { + std::vector newBuffer( handles.size() ); + std::transform( + handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); + return newBuffer; + } + + template + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, + UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + lhs.swap( rhs ); + } +#endif + + class DispatchLoaderBase + { + public: + DispatchLoaderBase() = default; + DispatchLoaderBase( std::nullptr_t ) +#if !defined( NDEBUG ) + : m_valid( false ) +#endif + {} + +#if !defined( NDEBUG ) + size_t getVkHeaderVersion() const { - return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + VULKAN_HPP_ASSERT( m_valid ); + return vkHeaderVersion; } - void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + private: + size_t vkHeaderVersion = VK_HEADER_VERSION; + bool m_valid = true; +#endif + }; + +#if !defined( VK_NO_PROTOTYPES ) + class DispatchLoaderStatic : public DispatchLoaderBase + { + public: + //=== VK_VERSION_1_0 === + + VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); } - void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectByteCountEXT( 
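// Illustrative sketch, not part of the patch: UniqueHandle gives unique_ptr-style
// ownership of a Vulkan handle; the *Unique creation helpers return it, and
// uniqueToRaw() (added above) copies a vector of unique handles back to raw ones.
// Assumes <vulkan/vulkan.hpp> with smart handles and exceptions enabled, and a
// working Vulkan loader at run time.
#include <vulkan/vulkan.hpp>

static void uniqueHandleSketch()
{
  vk::ApplicationInfo    appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );

  // createInstanceUnique returns a vk::UniqueInstance; the instance is destroyed
  // automatically when it goes out of scope (see ~UniqueHandle above).
  vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );

  // release() hands back the raw handle and drops ownership; from here on the
  // caller is responsible for destruction again.
  vk::Instance raw = instance.release();
  raw.destroy();
}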
commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + return ::vkDestroyInstance( instance, pAllocator ); } - void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + VkResult vkEnumeratePhysicalDevices( VkInstance instance, + uint32_t * pPhysicalDeviceCount, + VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); } - void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); } - void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); } - void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + return ::vkGetPhysicalDeviceImageFormatProperties( + physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); } - void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); } - void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + return ::vkGetPhysicalDeviceQueueFamilyProperties( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const + void vkGetPhysicalDeviceMemoryProperties( + 
VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); } - void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + return ::vkGetInstanceProcAddr( instance, pName ); } - void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + return ::vkGetDeviceProcAddr( device, pName ); } - void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndRenderPass( commandBuffer ); + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); } - void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + return ::vkDestroyDevice( device, pAllocator ); } - void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); } - void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); } - void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); } - void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const 
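// Illustrative sketch, not part of the patch: DispatchLoaderStatic is stateless and
// simply forwards to the prototypes exported by the Vulkan loader, so it can be used
// directly or passed as the dispatch argument of the C++ wrappers. Assumes
// <vulkan/vulkan.hpp> built without VK_NO_PROTOTYPES and a valid vk::Instance.
#include <vulkan/vulkan.hpp>
#include <vector>

static std::vector<vk::PhysicalDevice> listPhysicalDevices( vk::Instance instance )
{
  vk::DispatchLoaderStatic dispatch;

  // Raw usage: identical to calling the C entry point yourself.
  uint32_t count = 0;
  dispatch.vkEnumeratePhysicalDevices( instance, &count, nullptr );

  // Wrapper usage: the same dispatcher plugged into the enhanced C++ API.
  return instance.enumeratePhysicalDevices( dispatch );
}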
VULKAN_HPP_NOEXCEPT { - return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); } - void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const + void vkGetDeviceQueue( VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdNextSubpass( commandBuffer, contents ); + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); } - void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const + VkResult vkQueueSubmit( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); } - void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); + return ::vkQueueWaitIdle( queue ); } - void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo ); + return ::vkDeviceWaitIdle( device ); } - void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); } - void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const + void vkFreeMemory( VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + return ::vkFreeMemory( device, memory, pAllocator ); } - void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const + VkResult vkMapMemory( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void ** ppData ) const VULKAN_HPP_NOEXCEPT { - return 
::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); } - void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo ); + return ::vkUnmapMemory( device, memory ); } - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const + VkResult vkFlushMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); } - void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); } - void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const + void vkGetDeviceMemoryCommitment( VkDevice device, + VkDeviceMemory memory, + VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); } - void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const + VkResult vkBindBufferMemory( VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); } - void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const + VkResult vkBindImageMemory( VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + return ::vkBindImageMemory( device, image, memory, memoryOffset ); } - void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const + void vkGetBufferMemoryRequirements( VkDevice device, + VkBuffer buffer, + VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); } - void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const + void vkGetImageMemoryRequirements( VkDevice device, + VkImage image, + VkMemoryRequirements 
* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); } - void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + return ::vkGetImageSparseMemoryRequirements( + device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + return ::vkGetPhysicalDeviceSparseImageFormatProperties( + physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); } - void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const + VkResult vkQueueBindSparse( VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo * pBindInfo, + VkFence fence ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); } - void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); } - void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const + void vkDestroyFence( VkDevice device, + VkFence fence, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + return ::vkDestroyFence( device, fence, pAllocator ); } - void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + return ::vkResetFences( device, fenceCount, pFences ); } - void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + return ::vkGetFenceStatus( device, fence ); } - void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, 
float lineWidth ) const + VkResult vkWaitForFences( VkDevice device, + uint32_t fenceCount, + const VkFence * pFences, + VkBool32 waitAll, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); } - VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); } - VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const + void vkDestroySemaphore( VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + return ::vkDestroySemaphore( device, semaphore, pAllocator ); } - VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); } - void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const + void vkDestroyEvent( VkDevice device, + VkEvent event, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + return ::vkDestroyEvent( device, event, pAllocator ); } - void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + return ::vkGetEventStatus( device, event ); } - void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + return ::vkSetEvent( device, event ); } - void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + return ::vkResetEvent( device, event ); } - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, 
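// Illustrative sketch, not part of the patch: the fence entry points wrapped in this
// hunk, driven through DispatchLoaderStatic exactly as the C API would be. Assumes
// <vulkan/vulkan.hpp> and an already created, valid VkDevice.
#include <vulkan/vulkan.hpp>
#include <cstdint>

static VkResult waitOnNewFence( VkDevice device )
{
  vk::DispatchLoaderStatic d;

  VkFenceCreateInfo createInfo{};
  createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
  createInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;  // start signaled so the wait returns immediately

  VkFence  fence  = VK_NULL_HANDLE;
  VkResult result = d.vkCreateFence( device, &createInfo, nullptr, &fence );
  if ( result != VK_SUCCESS )
  {
    return result;
  }

  result = d.vkWaitForFences( device, 1, &fence, VK_TRUE, UINT64_MAX );
  d.vkDestroyFence( device, fence, nullptr );
  return result;
}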
pQueryPool ); } - void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const + void vkDestroyQueryPool( VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); } - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); } - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); } - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const + void vkDestroyBuffer( VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth ); + return ::vkDestroyBuffer( device, buffer, pAllocator ); } - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); } - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* 
pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const + void vkDestroyBufferView( VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); + return ::vkDestroyBufferView( device, bufferView, pAllocator ); } - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); } - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const + void vkDestroyImage( VkDevice device, + VkImage image, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + return ::vkDestroyImage( device, image, pAllocator ); } - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); } - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT { - return ::vkEndCommandBuffer( commandBuffer ); + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); } - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const + void vkDestroyImageView( VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkResetCommandBuffer( commandBuffer, flags ); + return ::vkDestroyImageView( device, imageView, pAllocator ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* 
pImageIndex ) const + void vkDestroyShaderModule( VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); } - VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); } - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const + void vkDestroyPipelineCache( VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); } - VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const + VkResult vkGetPipelineCacheData( VkDevice device, + VkPipelineCache pipelineCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); } - VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const + VkResult vkMergePipelineCaches( VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); } - VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { - return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + return ::vkCreateGraphicsPipelines( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, 
pPipelines ); } - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const + void vkDestroyPipeline( VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + return ::vkDestroyPipeline( device, pipeline, pAllocator ); } - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); } - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const + void vkDestroyPipelineLayout( VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); } - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindImageMemory( device, image, memory, memoryOffset ); + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); } - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const + void vkDestroySampler( VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + return ::vkDestroySampler( device, sampler, pAllocator ); } - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); } - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCompileDeferredNV( device, pipeline, shader ); + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); } - VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT { - return 
::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); } - VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const + void vkDestroyDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); } - VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const + VkResult vkResetDescriptorPool( VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + return ::vkResetDescriptorPool( device, descriptorPool, flags ); } - VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); } - VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); } - VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + return ::vkUpdateDescriptorSets( + device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); } - VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); } - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, 
const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const + void vkDestroyFramebuffer( VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); } - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); } - VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const + void vkDestroyRenderPass( VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); } - VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + void vkGetRenderAreaGranularity( VkDevice device, + VkRenderPass renderPass, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); } - VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); } - VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + void vkDestroyCommandPool( VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); } - VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const + VkResult vkResetCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + return ::vkResetCommandPool( device, commandPool, flags ); } - VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkImageView* pView ) const + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); } - VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); } - VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable ); + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); } - VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + return ::vkEndCommandBuffer( commandBuffer ); } - VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + return ::vkResetCommandBuffer( commandBuffer, flags ); } - VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); } - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); } - VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const + void 
vkCmdSetScissor( VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); } - VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); } - VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, + const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); } - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); } - VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); } - VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); } - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference ) 
const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); } - VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + return ::vkCmdBindDescriptorSets( commandBuffer, + pipelineBindPoint, + layout, + firstSet, + descriptorSetCount, + pDescriptorSets, + dynamicOffsetCount, + pDynamicOffsets ); } - VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); } - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); } - VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const + void vkCmdDraw( VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { - return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); } - void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } - void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyBuffer( device, buffer, pAllocator ); + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDrawIndexedIndirect( 
VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyBufferView( device, bufferView, pAllocator ); + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDispatch( VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); } - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); } - void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); } - void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + return ::vkCmdCopyImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + return ::vkCmdBlitImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); } - void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDevice( device, pAllocator ); + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const + void vkCmdCopyImageToBuffer( VkCommandBuffer 
commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyEvent( device, event, pAllocator ); + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } - void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyFence( device, fence, pAllocator ); + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); } - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); } - void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyImage( device, image, pAllocator ); + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); } - void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyImageView( device, imageView, pAllocator ); + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); } - void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator ); + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); } - void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator ); + return ::vkCmdResolveImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const + void vkCmdSetEvent( 
VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyPipeline( device, pipeline, pAllocator ); + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); } - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const + void vkCmdResetEvent( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); } - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); } - void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); } - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); } - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const 
VULKAN_HPP_NOEXCEPT { - return ::vkDestroySampler( device, sampler, pAllocator ); + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); } - void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + return ::vkCmdCopyQueryPoolResults( + commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); } - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); } - void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroySemaphore( device, semaphore, pAllocator ); + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); } - void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + return ::vkCmdNextSubpass( commandBuffer, contents ); } - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + return ::vkCmdEndRenderPass( commandBuffer ); } - void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); } - VkResult vkDeviceWaitIdle( VkDevice device ) const + //=== VK_VERSION_1_1 === + + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT { - return ::vkDeviceWaitIdle( device ); + return ::vkEnumerateInstanceVersion( pApiVersion ); } - VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const + VkResult vkBindBufferMemory2( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { - return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + return 
::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); } - VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const + VkResult vkBindImageMemory2( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { - return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); } - void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + return ::vkGetDeviceGroupPeerMemoryFeatures( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); } - VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); } - void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - return ::vkFreeMemory( device, memory, pAllocator ); + return ::vkCmdDispatchBase( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { - return 
::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo ) const + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + return ::vkGetImageSparseMemoryRequirements2( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); } - void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); } - void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); } - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); } - void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + return ::vkGetPhysicalDeviceQueueFamilyProperties2( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const + void vkGetPhysicalDeviceMemoryProperties2( + 
VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); } - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( + physicalDevice, pFormatInfo, pPropertyCount, pProperties ); } - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const + void vkTrimCommandPool( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + return ::vkTrimCommandPool( device, commandPool, flags ); } - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const + void vkGetDeviceQueue2( VkDevice device, + const VkDeviceQueueInfo2 * pQueueInfo, + VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); } - void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + return 
::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } - PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceProcAddr( device, pName ); + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); } - void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); } - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); + return ::vkGetPhysicalDeviceExternalBufferProperties( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetEventStatus( device, event ); + return ::vkGetPhysicalDeviceExternalFenceProperties( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); } - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const + void vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } - VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetFenceStatus( device, fence ); + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + //=== VK_VERSION_1_2 === + + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + return ::vkCmdDrawIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, 
maxDrawCount, stride ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + return ::vkCmdDrawIndexedIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); } - void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); } - void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); } - void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); } - void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const + void vkResetQueryPool( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); } - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const + VkResult + vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const 
VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); } - void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const + VkResult vkWaitSemaphores( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); } - uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageViewHandleNVX( device, pInfo ); + return ::vkSignalSemaphore( device, pSignalInfo ); } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + return ::vkGetBufferDeviceAddress( device, pInfo ); } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); } - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); } - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const + //=== VK_VERSION_1_3 === + + VkResult + vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + VkResult vkCreatePrivateDataSlot( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef 
VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const + void vkDestroyPrivateDataSlot( VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const + VkResult vkSetPrivateData( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const + void vkGetPrivateData( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); } - VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue ) const + void vkCmdSetEvent2( VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); } - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const + void vkCmdResetEvent2( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); } - VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const + void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); } - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const + void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, + const VkDependencyInfo * pDependencyInfo ) const 
VULKAN_HPP_NOEXCEPT { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); } - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const + void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); } - VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const + VkResult vkQueueSubmit2( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2 * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); } - VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const + void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); } - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const + void vkCmdCopyImage2( VkCommandBuffer commandBuffer, + const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); } - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const + void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); } - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const + void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + void vkCmdBlitImage2( VkCommandBuffer commandBuffer, + const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, 
VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const
+ void vkCmdResolveImage2( VkCommandBuffer commandBuffer,
+ const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
+ return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo );
}
- VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const
+ void vkCmdBeginRendering( VkCommandBuffer commandBuffer,
+ const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
+ return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo );
}
- VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const
+ void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+ return ::vkCmdEndRendering( commandBuffer );
}
- VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const
+ void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetSwapchainStatusKHR( device, swapchain );
+ return ::vkCmdSetCullMode( commandBuffer, cullMode );
}
- VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const
+ void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
+ return ::vkCmdSetFrontFace( commandBuffer, frontFace );
}
- VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const
+ void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer,
+ VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
+ return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology );
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const
+ void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer,
+ uint32_t viewportCount,
+ const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+ return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const
+ void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer,
+ uint32_t scissorCount,
+ const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+ return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors );
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const
+ void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer,
+ uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VkBuffer * pBuffers,
+ const VkDeviceSize * pOffsets,
+ const VkDeviceSize * pSizes,
+ const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+ return ::vkCmdBindVertexBuffers2(
+ commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const
+ void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
+ return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable );
}
- VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const
+ void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+ return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable );
}
- VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const
+ void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+ return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp );
}
- VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const
+ void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer,
+ VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+ return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable );
}
- VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const
+ void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer,
+ VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+ return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable );
}
- VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const
+ void vkCmdSetStencilOp( VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask,
+ VkStencilOp failOp,
+ VkStencilOp passOp,
+ VkStencilOp depthFailOp,
+ VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+ return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
}
- VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const
+ void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer,
+ VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+ return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable );
}
- VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
+ void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices );
+ return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable );
}
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const
+ void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer,
+ VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+ return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable );
}
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const
+ void vkGetDeviceBufferMemoryRequirements( VkDevice device,
+ const VkDeviceBufferMemoryRequirements * pInfo,
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+ return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements );
}
- VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const
+ void vkGetDeviceImageMemoryRequirements( VkDevice device,
+ const VkDeviceImageMemoryRequirements * pInfo,
+ VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetCommandPool( device, commandPool, flags );
+ return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements );
}
- VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const
+ void vkGetDeviceImageSparseMemoryRequirements( VkDevice device,
+ const VkDeviceImageMemoryRequirements * pInfo,
+ uint32_t * pSparseMemoryRequirementCount,
+ VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const
+ VULKAN_HPP_NOEXCEPT
{
- return ::vkResetDescriptorPool( device, descriptorPool, flags );
+ return ::vkGetDeviceImageSparseMemoryRequirements(
+ device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
}
- VkResult vkResetEvent( VkDevice device, VkEvent event ) const
+ //=== VK_KHR_surface ===
+
+ void vkDestroySurfaceKHR( VkInstance instance,
+ VkSurfaceKHR surface,
+ const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetEvent( device, event );
+ return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
}
- VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const
+ VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ VkSurfaceKHR surface,
+ VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT
{
- return ::vkResetFences( device, fenceCount, pFences );
+ return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
}
- void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
+ VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice,
+ VkSurfaceKHR surface,
+ VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const
+ VULKAN_HPP_NOEXCEPT
{
-
return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); } - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); } - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); } - VkResult vkSetEvent( VkDevice device, VkEvent event ) const + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT { - return ::vkSetEvent( device, event ); + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); } - void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const + void vkDestroySwapchainKHR( VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); } - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); } - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const + VkResult vkAcquireNextImageKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT { - return ::vkTrimCommandPool( device, commandPool, flags ); + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); } - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + return ::vkQueuePresentKHR( queue, pPresentInfo ); } - void vkUninitializePerformanceApiINTEL( VkDevice device ) const + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT { - return 
::vkUninitializePerformanceApiINTEL( device ); + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); } - void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const + VkResult vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT { - return ::vkUnmapMemory( device, memory ); + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); } - VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT { - return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices ); + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); } - void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const + VkResult vkAcquireNextImage2KHR( VkDevice device, + const VkAcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); } - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const + //=== VK_KHR_display === + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT { - return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkResult 
vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); } - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); } - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); } -#ifdef VK_USE_PLATFORM_IOS_MVK - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_IOS_MVK*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - VkResult 
vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); } -#endif /*VK_USE_PLATFORM_FUCHSIA*/ +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); } -#endif /*VK_USE_PLATFORM_METAL_EXT*/ +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === -#ifdef VK_USE_PLATFORM_GGP - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN - VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); } -#endif /*VK_USE_PLATFORM_VI_NN*/ +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* 
pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); } - void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const 
VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + return ::vkDebugReportMessageEXT( + instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); } - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const + //=== VK_EXT_debug_marker === + + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, + const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); } - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, + const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroyInstance( instance, pAllocator ); + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); } - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); } - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); } - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); } - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileKHR * pVideoProfile, + VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT { - return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); } - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetInstanceProcAddr( instance, pName ); + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( + physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); } - 
void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); } -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const + void vkDestroyVideoSessionKHR( VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const + VkResult vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VkVideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + return ::vkGetVideoSessionMemoryRequirementsKHR( + device, videoSession, pVideoSessionMemoryRequirementsCount, pVideoSessionMemoryRequirements ); } - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const + VkResult + vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VkVideoBindMemoryKHR * pVideoSessionBindMemories ) const VULKAN_HPP_NOEXCEPT { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + return ::vkBindVideoSessionMemoryKHR( + device, videoSession, videoSessionBindMemoryCount, pVideoSessionBindMemories ); } - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) const + VULKAN_HPP_NOEXCEPT { - return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); } - VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const + VULKAN_HPP_NOEXCEPT { - return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + return ::vkUpdateVideoSessionParametersKHR( 
device, videoSessionParameters, pUpdateInfo ); } - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); } - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); } - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); } - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const + void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR * pFrameInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + return ::vkCmdDecodeVideoKHR( commandBuffer, pFrameInfo ); } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + return ::vkCmdBindTransformFeedbackBuffersEXT( + commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); } - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const + 
void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + return ::vkCmdBeginTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); } - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + return ::vkCmdEndTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); } - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); } - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); } - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + return ::vkCmdDrawIndirectByteCountEXT( + commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); } - void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const + //=== VK_NVX_binary_import === + + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); } - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( 
VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); } - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const + void vkDestroyCuModuleNVX( VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); } - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const + void vkDestroyCuFunctionNVX( VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); } - VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); } - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const + //=== VK_NVX_image_view_handle === + + uint32_t vkGetImageViewHandleNVX( VkDevice device, + const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + return ::vkGetImageViewHandleNVX( device, pInfo ); } - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const + VkResult vkGetImageViewAddressNVX( VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); } - void vkGetPhysicalDeviceFeatures( 
VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const + //=== VK_AMD_draw_indirect_count === + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + return ::vkCmdDrawIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + return ::vkCmdDrawIndexedIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const + //=== VK_AMD_shader_info === + + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); } - void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const + //=== VK_KHR_dynamic_rendering === + + void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, + const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); } - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const + void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); + return ::vkCmdEndRenderingKHR( commandBuffer ); } - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === - void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType 
type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits ); + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const + //=== VK_KHR_get_physical_device_properties2 === + + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); } - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); } - void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); } - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice 
physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const + void vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); } - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + physicalDevice, pFormatInfo, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const + //=== VK_KHR_device_group === + + void + vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); } - void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); } - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + return ::vkCmdDispatchBaseKHR( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === - void 
vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const + void vkTrimCommandPoolKHR( VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); } - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const + //=== VK_KHR_device_group_creation === + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + return ::vkEnumeratePhysicalDeviceGroupsKHR( + instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const + //=== VK_KHR_external_memory_capabilities === + + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, + const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const + VkResult vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, 
VkSparseImageFormatProperties2* pProperties ) const + //=== VK_KHR_external_memory_fd === + + VkResult + vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); } - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const + //=== VK_KHR_external_semaphore_capabilities === + + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VkResult vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const + //=== VK_KHR_external_semaphore_fd === + + VkResult + vkImportSemaphoreFdKHR( VkDevice device, + const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); } - VkResult 
vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const + VkResult vkGetSemaphoreFdKHR( VkDevice device, + const VkSemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); } -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const + //=== VK_KHR_push_descriptor === + + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + return ::vkCmdPushDescriptorSetKHR( + commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); } - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); } -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( 
physicalDevice, queueFamilyIndex ); + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const + //=== VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); } -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseDisplayEXT( physicalDevice, display ); } - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); } - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, + Display * dpy, + RROutput rrOutput, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const + //=== 
VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT { - return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); } - void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueueEndDebugUtilsLabelEXT( queue ); + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); } - void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); } - VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueuePresentKHR( queue, pPresentInfo ); + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); } - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT { - return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); } - VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const + VULKAN_HPP_NOEXCEPT { - return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); } - VkResult vkQueueWaitIdle( VkQueue queue ) const + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const + VULKAN_HPP_NOEXCEPT { - return ::vkQueueWaitIdle( queue ); + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); } - }; - typedef DispatchLoaderStatic DispatchLoaderDefault; -#else // !defined(VK_NO_PROTOTYPES) - class NeedExplicitDispatchLoader; - typedef NeedExplicitDispatchLoader DispatchLoaderDefault; -#endif - - struct AllocationCallbacks; + //=== VK_EXT_discard_rectangles === - template - class ObjectDestroy - { - public: - ObjectDestroy( OwnerType owner = OwnerType(), Optional allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() ) - : m_owner( owner ) - , 
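// Illustrative sketch, not part of the generated header: querying the display refresh
// period through the VK_GOOGLE_display_timing wrapper above. Assumes `device` and
// `swapchain` are valid and the extension is enabled.
#include <vulkan/vulkan.h>

uint64_t RefreshDurationNs( VkDevice device, VkSwapchainKHR swapchain )
{
  auto pfn = reinterpret_cast<PFN_vkGetRefreshCycleDurationGOOGLE>(
      vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );

  VkRefreshCycleDurationGOOGLE timing{};
  if ( pfn && pfn( device, swapchain, &timing ) == VK_SUCCESS )
    return timing.refreshDuration;  // nanoseconds per display refresh cycle
  return 0;
}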
m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( + commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } - OwnerType getOwner() const { return m_owner; } - Optional getAllocator() const { return m_allocationCallbacks; } + //=== VK_EXT_hdr_metadata === - protected: - template - void destroy(T t) - { - m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); - } + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } - private: - OwnerType m_owner; - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; - }; + //=== VK_KHR_create_renderpass2 === - class NoParent; + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } - template - class ObjectDestroy - { - public: - ObjectDestroy( Optional allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() ) - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } - Optional getAllocator() const { return m_allocationCallbacks; } + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } - protected: - template - void destroy(T t) - { - t.destroy( m_allocationCallbacks, *m_dispatch ); - } + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } - private: - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; - }; + //=== VK_KHR_shared_presentable_image === - template - class ObjectFree - { - public: - ObjectFree( OwnerType owner = OwnerType(), Optional allocationCallbacks = nullptr, Dispatch const &dispatch = Dispatch() ) - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - {} + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } - OwnerType getOwner() const { return m_owner; } - Optional getAllocator() const { return m_allocationCallbacks; } + //=== VK_KHR_external_fence_capabilities === - protected: - template - void destroy(T t) - { - m_owner.free( t, m_allocationCallbacks, *m_dispatch ); - } + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + 
VkExternalFenceProperties * pExternalFenceProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( + physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } - private: - OwnerType m_owner; - Optional m_allocationCallbacks; - Dispatch const* m_dispatch; - }; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === - template - class PoolFree - { - public: - PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = Dispatch() ) - : m_owner( owner ) - , m_pool( pool ) - , m_dispatch( &dispatch ) - {} - - OwnerType getOwner() const { return m_owner; } - PoolType getPool() const { return m_pool; } - - protected: - template - void destroy(T t) - { - m_owner.free( m_pool, t, *m_dispatch ); - } - - private: - OwnerType m_owner; - PoolType m_pool; - Dispatch const* m_dispatch; - }; + VkResult vkImportFenceWin32HandleKHR( + VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } - using Bool32 = uint32_t; - using DeviceAddress = uint64_t; - using DeviceSize = uint64_t; - using SampleMask = uint32_t; + VkResult vkGetFenceWin32HandleKHR( VkDevice device, + const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - enum class AccelerationStructureMemoryRequirementsTypeNV - { - eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, - eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, - eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - }; + //=== VK_KHR_external_fence_fd === - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) - { - switch ( value ) + VkResult vkImportFenceFdKHR( VkDevice device, + const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT { - case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object"; - case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch"; - case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch"; - default: return "invalid"; + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); } - } - - enum class AccelerationStructureTypeNV - { - eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, - eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - }; - VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeNV value ) - { - switch ( value ) + VkResult + vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { - case AccelerationStructureTypeNV::eTopLevel : return "TopLevel"; - case AccelerationStructureTypeNV::eBottomLevel : return "BottomLevel"; - default: return "invalid"; + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); } - } - enum class AttachmentLoadOp - { - eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, - eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, - eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE - }; + //=== VK_KHR_performance_query === - VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) - { - switch ( value ) + VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice 
physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT { - case AttachmentLoadOp::eLoad : return "Load"; - case AttachmentLoadOp::eClear : return "Clear"; - case AttachmentLoadOp::eDontCare : return "DontCare"; - default: return "invalid"; + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); } - } - - enum class AttachmentStoreOp - { - eStore = VK_ATTACHMENT_STORE_OP_STORE, - eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE - }; - VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) - { - switch ( value ) + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT { - case AttachmentStoreOp::eStore : return "Store"; - case AttachmentStoreOp::eDontCare : return "DontCare"; - default: return "invalid"; + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); } - } - enum class BlendFactor - { - eZero = VK_BLEND_FACTOR_ZERO, - eOne = VK_BLEND_FACTOR_ONE, - eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, - eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, - eDstColor = VK_BLEND_FACTOR_DST_COLOR, - eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, - eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, - eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, - eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, - eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, - eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, - eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, - eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, - eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, - eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, - eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, - eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, - eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, - eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - }; - - VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) - { - switch ( value ) - { - case BlendFactor::eZero : return "Zero"; - case BlendFactor::eOne : return "One"; - case BlendFactor::eSrcColor : return "SrcColor"; - case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor"; - case BlendFactor::eDstColor : return "DstColor"; - case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor"; - case BlendFactor::eSrcAlpha : return "SrcAlpha"; - case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha"; - case BlendFactor::eDstAlpha : return "DstAlpha"; - case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha"; - case BlendFactor::eConstantColor : return "ConstantColor"; - case BlendFactor::eOneMinusConstantColor : return "OneMinusConstantColor"; - case BlendFactor::eConstantAlpha : return "ConstantAlpha"; - case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha"; - case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate"; - case BlendFactor::eSrc1Color : return "Src1Color"; - case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color"; - case BlendFactor::eSrc1Alpha : return "Src1Alpha"; - case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha"; - default: return "invalid"; + VkResult 
vkAcquireProfilingLockKHR( VkDevice device, + const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireProfilingLockKHR( device, pInfo ); } - } - enum class BlendOp - { - eAdd = VK_BLEND_OP_ADD, - eSubtract = VK_BLEND_OP_SUBTRACT, - eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, - eMin = VK_BLEND_OP_MIN, - eMax = VK_BLEND_OP_MAX, - eZeroEXT = VK_BLEND_OP_ZERO_EXT, - eSrcEXT = VK_BLEND_OP_SRC_EXT, - eDstEXT = VK_BLEND_OP_DST_EXT, - eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, - eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, - eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, - eDstInEXT = VK_BLEND_OP_DST_IN_EXT, - eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, - eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, - eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, - eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, - eXorEXT = VK_BLEND_OP_XOR_EXT, - eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, - eScreenEXT = VK_BLEND_OP_SCREEN_EXT, - eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, - eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, - eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, - eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, - eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, - eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, - eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, - eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, - eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, - eInvertEXT = VK_BLEND_OP_INVERT_EXT, - eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, - eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, - eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, - eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, - eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, - ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, - eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, - eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, - eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, - eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, - eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, - ePlusEXT = VK_BLEND_OP_PLUS_EXT, - ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, - ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, - ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, - eMinusEXT = VK_BLEND_OP_MINUS_EXT, - eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, - eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, - eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, - eRedEXT = VK_BLEND_OP_RED_EXT, - eGreenEXT = VK_BLEND_OP_GREEN_EXT, - eBlueEXT = VK_BLEND_OP_BLUE_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( BlendOp value ) - { - switch ( value ) - { - case BlendOp::eAdd : return "Add"; - case BlendOp::eSubtract : return "Subtract"; - case BlendOp::eReverseSubtract : return "ReverseSubtract"; - case BlendOp::eMin : return "Min"; - case BlendOp::eMax : return "Max"; - case BlendOp::eZeroEXT : return "ZeroEXT"; - case BlendOp::eSrcEXT : return "SrcEXT"; - case BlendOp::eDstEXT : return "DstEXT"; - case BlendOp::eSrcOverEXT : return "SrcOverEXT"; - case BlendOp::eDstOverEXT : return "DstOverEXT"; - case BlendOp::eSrcInEXT : return "SrcInEXT"; - case BlendOp::eDstInEXT : return "DstInEXT"; - case BlendOp::eSrcOutEXT : return "SrcOutEXT"; - case BlendOp::eDstOutEXT : return "DstOutEXT"; - case BlendOp::eSrcAtopEXT : return "SrcAtopEXT"; - case BlendOp::eDstAtopEXT : return "DstAtopEXT"; - case BlendOp::eXorEXT : return "XorEXT"; - case BlendOp::eMultiplyEXT : return "MultiplyEXT"; - case BlendOp::eScreenEXT : return "ScreenEXT"; - case BlendOp::eOverlayEXT : return "OverlayEXT"; - case BlendOp::eDarkenEXT : return "DarkenEXT"; - case BlendOp::eLightenEXT : return "LightenEXT"; - case BlendOp::eColordodgeEXT : return "ColordodgeEXT"; - case 
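// Illustrative sketch, not part of the generated header: bracketing work with the
// VK_KHR_performance_query profiling-lock wrappers above. Assumes `device` is valid
// and the extension is enabled on it.
#include <vulkan/vulkan.h>
#include <cstdint>

bool RecordWithProfilingLock( VkDevice device )
{
  auto pfnAcquire = reinterpret_cast<PFN_vkAcquireProfilingLockKHR>(
      vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
  auto pfnRelease = reinterpret_cast<PFN_vkReleaseProfilingLockKHR>(
      vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
  if ( !pfnAcquire || !pfnRelease )
    return false;

  VkAcquireProfilingLockInfoKHR lockInfo{};
  lockInfo.sType   = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
  lockInfo.timeout = UINT64_MAX;  // wait until the lock is granted

  if ( pfnAcquire( device, &lockInfo ) != VK_SUCCESS )
    return false;
  // ... record and submit command buffers containing performance queries here ...
  pfnRelease( device );
  return true;
}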
BlendOp::eColorburnEXT : return "ColorburnEXT"; - case BlendOp::eHardlightEXT : return "HardlightEXT"; - case BlendOp::eSoftlightEXT : return "SoftlightEXT"; - case BlendOp::eDifferenceEXT : return "DifferenceEXT"; - case BlendOp::eExclusionEXT : return "ExclusionEXT"; - case BlendOp::eInvertEXT : return "InvertEXT"; - case BlendOp::eInvertRgbEXT : return "InvertRgbEXT"; - case BlendOp::eLineardodgeEXT : return "LineardodgeEXT"; - case BlendOp::eLinearburnEXT : return "LinearburnEXT"; - case BlendOp::eVividlightEXT : return "VividlightEXT"; - case BlendOp::eLinearlightEXT : return "LinearlightEXT"; - case BlendOp::ePinlightEXT : return "PinlightEXT"; - case BlendOp::eHardmixEXT : return "HardmixEXT"; - case BlendOp::eHslHueEXT : return "HslHueEXT"; - case BlendOp::eHslSaturationEXT : return "HslSaturationEXT"; - case BlendOp::eHslColorEXT : return "HslColorEXT"; - case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT"; - case BlendOp::ePlusEXT : return "PlusEXT"; - case BlendOp::ePlusClampedEXT : return "PlusClampedEXT"; - case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT"; - case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT"; - case BlendOp::eMinusEXT : return "MinusEXT"; - case BlendOp::eMinusClampedEXT : return "MinusClampedEXT"; - case BlendOp::eContrastEXT : return "ContrastEXT"; - case BlendOp::eInvertOvgEXT : return "InvertOvgEXT"; - case BlendOp::eRedEXT : return "RedEXT"; - case BlendOp::eGreenEXT : return "GreenEXT"; - case BlendOp::eBlueEXT : return "BlueEXT"; - default: return "invalid"; + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); } - } - enum class BlendOverlapEXT - { - eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, - eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT - }; + //=== VK_KHR_get_surface_capabilities2 === - VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const + VULKAN_HPP_NOEXCEPT { - case BlendOverlapEXT::eUncorrelated : return "Uncorrelated"; - case BlendOverlapEXT::eDisjoint : return "Disjoint"; - case BlendOverlapEXT::eConjoint : return "Conjoint"; - default: return "invalid"; + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); } - } - - enum class BorderColor - { - eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, - eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, - eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, - eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, - eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE - }; - VULKAN_HPP_INLINE std::string to_string( BorderColor value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT { - case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack"; - case BorderColor::eIntTransparentBlack : return "IntTransparentBlack"; - case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack"; - case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack"; - case 
BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite"; - case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite"; - default: return "invalid"; + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( + physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); } - } - enum class ChromaLocation - { - eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, - eMidpoint = VK_CHROMA_LOCATION_MIDPOINT, - eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN_KHR, - eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT_KHR - }; + //=== VK_KHR_get_display_properties2 === - VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT { - case ChromaLocation::eCositedEven : return "CositedEven"; - case ChromaLocation::eMidpoint : return "Midpoint"; - default: return "invalid"; + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } - } - enum class CoarseSampleOrderTypeNV - { - eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, - eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, - ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, - eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - }; + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } - VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) - { - switch ( value ) + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT { - case CoarseSampleOrderTypeNV::eDefault : return "Default"; - case CoarseSampleOrderTypeNV::eCustom : return "Custom"; - case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor"; - case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor"; - default: return "invalid"; + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); } - } - enum class ColorSpaceKHR - { - eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, - eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, - eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, - eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, - eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, - eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, - eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, - eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, - eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, - eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, - eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, - eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, - ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, - eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, - eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, - eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, - eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value ) - { - switch ( value ) - { - case 
ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear"; - case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT"; - case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT"; - case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT"; - case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT"; - case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT"; - case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT"; - case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT"; - case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT"; - case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT"; - case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT"; - case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT"; - case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT"; - case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT"; - case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT"; - case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD"; - default: return "invalid"; + VkResult + vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); } - } - enum class CommandBufferLevel - { - ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY - }; +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === - VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value ) - { - switch ( value ) + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - case CommandBufferLevel::ePrimary : return "Primary"; - case CommandBufferLevel::eSecondary : return "Secondary"; - default: return "invalid"; + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); } - } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ - enum class CompareOp - { - eNever = VK_COMPARE_OP_NEVER, - eLess = VK_COMPARE_OP_LESS, - eEqual = VK_COMPARE_OP_EQUAL, - eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, - eGreater = VK_COMPARE_OP_GREATER, - eNotEqual = VK_COMPARE_OP_NOT_EQUAL, - eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, - eAlways = VK_COMPARE_OP_ALWAYS - }; +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === - VULKAN_HPP_INLINE std::string to_string( CompareOp value ) - { - switch ( value ) + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - case CompareOp::eNever : return "Never"; - case CompareOp::eLess : return "Less"; - case CompareOp::eEqual : return "Equal"; - case CompareOp::eLessOrEqual : return "LessOrEqual"; - case CompareOp::eGreater : return "Greater"; - case CompareOp::eNotEqual : return "NotEqual"; - case CompareOp::eGreaterOrEqual : return "GreaterOrEqual"; - case CompareOp::eAlways : return "Always"; - default: return "invalid"; + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); } - } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ - enum class ComponentSwizzle - { - eIdentity = 
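// Illustrative sketch, not part of the generated header: the to_string overloads in the
// removed hunk are still provided by current Vulkan-Hpp (depending on SDK version they
// live in vulkan.hpp itself or in vulkan_to_string.hpp, which then needs to be included
// as well) and make enum values readable when logging, e.g. a chosen surface format:
#include <iostream>
#include <vulkan/vulkan.hpp>

void LogSurfaceFormat( const vk::SurfaceFormatKHR & surfaceFormat )
{
  std::cout << "format: " << vk::to_string( surfaceFormat.format )
            << ", color space: " << vk::to_string( surfaceFormat.colorSpace ) << '\n';
}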
VK_COMPONENT_SWIZZLE_IDENTITY, - eZero = VK_COMPONENT_SWIZZLE_ZERO, - eOne = VK_COMPONENT_SWIZZLE_ONE, - eR = VK_COMPONENT_SWIZZLE_R, - eG = VK_COMPONENT_SWIZZLE_G, - eB = VK_COMPONENT_SWIZZLE_B, - eA = VK_COMPONENT_SWIZZLE_A - }; + //=== VK_EXT_debug_utils === - VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) - { - switch ( value ) + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, + const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT { - case ComponentSwizzle::eIdentity : return "Identity"; - case ComponentSwizzle::eZero : return "Zero"; - case ComponentSwizzle::eOne : return "One"; - case ComponentSwizzle::eR : return "R"; - case ComponentSwizzle::eG : return "G"; - case ComponentSwizzle::eB : return "B"; - case ComponentSwizzle::eA : return "A"; - default: return "invalid"; + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); } - } - enum class ComponentTypeNV - { - eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, - eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, - eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, - eSint8 = VK_COMPONENT_TYPE_SINT8_NV, - eSint16 = VK_COMPONENT_TYPE_SINT16_NV, - eSint32 = VK_COMPONENT_TYPE_SINT32_NV, - eSint64 = VK_COMPONENT_TYPE_SINT64_NV, - eUint8 = VK_COMPONENT_TYPE_UINT8_NV, - eUint16 = VK_COMPONENT_TYPE_UINT16_NV, - eUint32 = VK_COMPONENT_TYPE_UINT32_NV, - eUint64 = VK_COMPONENT_TYPE_UINT64_NV - }; - - VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) - { - switch ( value ) - { - case ComponentTypeNV::eFloat16 : return "Float16"; - case ComponentTypeNV::eFloat32 : return "Float32"; - case ComponentTypeNV::eFloat64 : return "Float64"; - case ComponentTypeNV::eSint8 : return "Sint8"; - case ComponentTypeNV::eSint16 : return "Sint16"; - case ComponentTypeNV::eSint32 : return "Sint32"; - case ComponentTypeNV::eSint64 : return "Sint64"; - case ComponentTypeNV::eUint8 : return "Uint8"; - case ComponentTypeNV::eUint16 : return "Uint16"; - case ComponentTypeNV::eUint32 : return "Uint32"; - case ComponentTypeNV::eUint64 : return "Uint64"; - default: return "invalid"; + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, + const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); } - } - - enum class ConservativeRasterizationModeEXT - { - eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, - eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, - eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - }; - VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value ) - { - switch ( value ) + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { - case ConservativeRasterizationModeEXT::eDisabled : return "Disabled"; - case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate"; - case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate"; - default: return "invalid"; + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); } - } - enum class CopyAccelerationStructureModeNV - { - eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, - eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - }; + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } - VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeNV value ) - { - switch ( value ) + void 
vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { - case CopyAccelerationStructureModeNV::eClone : return "Clone"; - case CopyAccelerationStructureModeNV::eCompact : return "Compact"; - default: return "invalid"; + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); } - } - enum class CoverageModulationModeNV - { - eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, - eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, - eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, - eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV - }; + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } - VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) - { - switch ( value ) + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { - case CoverageModulationModeNV::eNone : return "None"; - case CoverageModulationModeNV::eRgb : return "Rgb"; - case CoverageModulationModeNV::eAlpha : return "Alpha"; - case CoverageModulationModeNV::eRgba : return "Rgba"; - default: return "invalid"; + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); } - } - enum class CoverageReductionModeNV - { - eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, - eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV - }; + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } - VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) - { - switch ( value ) + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT { - case CoverageReductionModeNV::eMerge : return "Merge"; - case CoverageReductionModeNV::eTruncate : return "Truncate"; - default: return "invalid"; + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); } - } - enum class DebugReportObjectTypeEXT - { - eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, - ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, - eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, - eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, - eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, - eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, - eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, - eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, - eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, - eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, - eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, - eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, - eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, - eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, - eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, - ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, - ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, - eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, - ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, - eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, - 
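// Illustrative sketch, not part of the generated header: wrapping a stretch of
// command-buffer work in a VK_EXT_debug_utils label via the wrappers above, so it shows
// up grouped in tools such as RenderDoc. Assumes `instance` and `cmd` are valid and the
// extension was enabled on the instance.
#include <vulkan/vulkan.h>

void LabelShadowPass( VkInstance instance, VkCommandBuffer cmd )
{
  auto pfnBegin = reinterpret_cast<PFN_vkCmdBeginDebugUtilsLabelEXT>(
      vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
  auto pfnEnd = reinterpret_cast<PFN_vkCmdEndDebugUtilsLabelEXT>(
      vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
  if ( !pfnBegin || !pfnEnd )
    return;

  VkDebugUtilsLabelEXT label{};
  label.sType      = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  label.pLabelName = "shadow pass";
  label.color[0]   = 0.2f;
  label.color[1]   = 0.2f;
  label.color[2]   = 0.8f;
  label.color[3]   = 1.0f;

  pfnBegin( cmd, &label );
  // ... record the draws that belong to this pass ...
  pfnEnd( cmd );
}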
eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, - eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, - eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, - eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, - eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, - eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, - eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, - eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, - eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, - eObjectTableNVX = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, - eIndirectCommandsLayoutNVX = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, - eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, - eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, - eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, - eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, - eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) - { - switch ( value ) - { - case DebugReportObjectTypeEXT::eUnknown : return "Unknown"; - case DebugReportObjectTypeEXT::eInstance : return "Instance"; - case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice"; - case DebugReportObjectTypeEXT::eDevice : return "Device"; - case DebugReportObjectTypeEXT::eQueue : return "Queue"; - case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore"; - case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer"; - case DebugReportObjectTypeEXT::eFence : return "Fence"; - case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory"; - case DebugReportObjectTypeEXT::eBuffer : return "Buffer"; - case DebugReportObjectTypeEXT::eImage : return "Image"; - case DebugReportObjectTypeEXT::eEvent : return "Event"; - case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool"; - case DebugReportObjectTypeEXT::eBufferView : return "BufferView"; - case DebugReportObjectTypeEXT::eImageView : return "ImageView"; - case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule"; - case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache"; - case DebugReportObjectTypeEXT::ePipelineLayout : return "PipelineLayout"; - case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass"; - case DebugReportObjectTypeEXT::ePipeline : return "Pipeline"; - case DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout"; - case DebugReportObjectTypeEXT::eSampler : return "Sampler"; - case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool"; - case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet"; - case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer"; - case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool"; - case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR"; - case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR"; - case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : 
return "DebugReportCallbackEXT"; - case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR"; - case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR"; - case DebugReportObjectTypeEXT::eObjectTableNVX : return "ObjectTableNVX"; - case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; - case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT"; - case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; - case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV"; - default: return "invalid"; + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); } - } - enum class DescriptorType - { - eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, - eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, - eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, - eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, - eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, - eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, - eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, - eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, - eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV - }; - - VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) - { - switch ( value ) - { - case DescriptorType::eSampler : return "Sampler"; - case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler"; - case DescriptorType::eSampledImage : return "SampledImage"; - case DescriptorType::eStorageImage : return "StorageImage"; - case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer"; - case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer"; - case DescriptorType::eUniformBuffer : return "UniformBuffer"; - case DescriptorType::eStorageBuffer : return "StorageBuffer"; - case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic"; - case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic"; - case DescriptorType::eInputAttachment : return "InputAttachment"; - case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT"; - case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV"; - default: return "invalid"; + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); } - } - enum class DescriptorUpdateTemplateType - { - eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, - eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR - }; +# if defined( 
VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === - VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) - { - switch ( value ) + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const + VULKAN_HPP_NOEXCEPT { - case DescriptorUpdateTemplateType::eDescriptorSet : return "DescriptorSet"; - case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR"; - default: return "invalid"; + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); } - } - - enum class DeviceEventTypeEXT - { - eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - }; - VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value ) - { - switch ( value ) + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT { - case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug"; - default: return "invalid"; + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); } - } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - enum class DiscardRectangleModeEXT - { - eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - }; + //=== VK_EXT_sample_locations === - VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value ) - { - switch ( value ) + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT { - case DiscardRectangleModeEXT::eInclusive : return "Inclusive"; - case DiscardRectangleModeEXT::eExclusive : return "Exclusive"; - default: return "invalid"; + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); } - } - - enum class DisplayEventTypeEXT - { - eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - }; - VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value ) - { - switch ( value ) + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const + VULKAN_HPP_NOEXCEPT { - case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut"; - default: return "invalid"; + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); } - } - enum class DisplayPowerStateEXT - { - eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, - eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, - eOn = VK_DISPLAY_POWER_STATE_ON_EXT - }; + //=== VK_KHR_get_memory_requirements2 === - VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) - { - switch ( value ) + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT { - case DisplayPowerStateEXT::eOff : return "Off"; - case DisplayPowerStateEXT::eSuspend : return "Suspend"; - case DisplayPowerStateEXT::eOn : return "On"; - default: return "invalid"; + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); } - } - enum class DriverIdKHR - { - eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY_KHR, - eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR, - eMesaRadv = VK_DRIVER_ID_MESA_RADV_KHR, - 
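// Illustrative sketch, not part of the generated header: the extensible two-struct
// memory-requirements query wrapped above (promoted to core in Vulkan 1.1), here with a
// VkMemoryDedicatedRequirements chained in. Assumes `device` and `image` are valid and a
// 1.1-or-newer device so the core entry point can be called directly.
#include <vulkan/vulkan.h>

VkMemoryRequirements2 QueryImageMemoryRequirements( VkDevice device, VkImage image,
                                                    VkMemoryDedicatedRequirements & dedicated )
{
  dedicated       = {};
  dedicated.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;

  VkImageMemoryRequirementsInfo2 info{};
  info.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
  info.image = image;

  VkMemoryRequirements2 reqs{};
  reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
  reqs.pNext = &dedicated;  // ask whether a dedicated allocation is preferred/required

  vkGetImageMemoryRequirements2( device, &info, &reqs );
  return reqs;
}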
eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, - eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, - eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR, - eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR, - eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR, - eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY_KHR, - eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR, - eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY_KHR, - eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( DriverIdKHR value ) - { - switch ( value ) - { - case DriverIdKHR::eAmdProprietary : return "AmdProprietary"; - case DriverIdKHR::eAmdOpenSource : return "AmdOpenSource"; - case DriverIdKHR::eMesaRadv : return "MesaRadv"; - case DriverIdKHR::eNvidiaProprietary : return "NvidiaProprietary"; - case DriverIdKHR::eIntelProprietaryWindows : return "IntelProprietaryWindows"; - case DriverIdKHR::eIntelOpenSourceMESA : return "IntelOpenSourceMESA"; - case DriverIdKHR::eImaginationProprietary : return "ImaginationProprietary"; - case DriverIdKHR::eQualcommProprietary : return "QualcommProprietary"; - case DriverIdKHR::eArmProprietary : return "ArmProprietary"; - case DriverIdKHR::eGoogleSwiftshader : return "GoogleSwiftshader"; - case DriverIdKHR::eGgpProprietary : return "GgpProprietary"; - case DriverIdKHR::eBroadcomProprietary : return "BroadcomProprietary"; - default: return "invalid"; + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); } - } - enum class DynamicState - { - eViewport = VK_DYNAMIC_STATE_VIEWPORT, - eScissor = VK_DYNAMIC_STATE_SCISSOR, - eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, - eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, - eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, - eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, - eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, - eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, - eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, - eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, - eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, - eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, - eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, - eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( DynamicState value ) - { - switch ( value ) - { - case DynamicState::eViewport : return "Viewport"; - case DynamicState::eScissor : return "Scissor"; - case DynamicState::eLineWidth : return "LineWidth"; - case DynamicState::eDepthBias : return "DepthBias"; - case DynamicState::eBlendConstants : return "BlendConstants"; - case DynamicState::eDepthBounds : return "DepthBounds"; - case DynamicState::eStencilCompareMask : return "StencilCompareMask"; - case DynamicState::eStencilWriteMask : return "StencilWriteMask"; - case DynamicState::eStencilReference : return "StencilReference"; - case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV"; - case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT"; - case 
DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT"; - case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV"; - case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV"; - case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV"; - case DynamicState::eLineStippleEXT : return "LineStippleEXT"; - default: return "invalid"; + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - } - enum class Filter - { - eNearest = VK_FILTER_NEAREST, - eLinear = VK_FILTER_LINEAR, - eCubicIMG = VK_FILTER_CUBIC_IMG, - eCubicEXT = VK_FILTER_CUBIC_EXT - }; + //=== VK_KHR_acceleration_structure === - VULKAN_HPP_INLINE std::string to_string( Filter value ) - { - switch ( value ) + VkResult + vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT { - case Filter::eNearest : return "Nearest"; - case Filter::eLinear : return "Linear"; - case Filter::eCubicIMG : return "CubicIMG"; - default: return "invalid"; + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); } - } - enum class Format - { - eUndefined = VK_FORMAT_UNDEFINED, - eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, - eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, - eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, - eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, - eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, - eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, - eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, - eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, - eR8Unorm = VK_FORMAT_R8_UNORM, - eR8Snorm = VK_FORMAT_R8_SNORM, - eR8Uscaled = VK_FORMAT_R8_USCALED, - eR8Sscaled = VK_FORMAT_R8_SSCALED, - eR8Uint = VK_FORMAT_R8_UINT, - eR8Sint = VK_FORMAT_R8_SINT, - eR8Srgb = VK_FORMAT_R8_SRGB, - eR8G8Unorm = VK_FORMAT_R8G8_UNORM, - eR8G8Snorm = VK_FORMAT_R8G8_SNORM, - eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, - eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, - eR8G8Uint = VK_FORMAT_R8G8_UINT, - eR8G8Sint = VK_FORMAT_R8G8_SINT, - eR8G8Srgb = VK_FORMAT_R8G8_SRGB, - eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, - eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, - eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, - eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, - eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, - eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, - eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, - eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, - eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, - eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, - eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, - eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, - eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, - eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, - eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, - eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, - eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, - eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, - eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, - eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, - eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, - eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, - 
eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, - eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, - eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, - eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, - eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, - eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, - eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, - eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, - eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, - eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, - eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, - eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, - eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, - eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, - eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, - eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, - eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, - eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, - eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, - eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, - eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, - eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, - eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, - eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, - eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, - eR16Unorm = VK_FORMAT_R16_UNORM, - eR16Snorm = VK_FORMAT_R16_SNORM, - eR16Uscaled = VK_FORMAT_R16_USCALED, - eR16Sscaled = VK_FORMAT_R16_SSCALED, - eR16Uint = VK_FORMAT_R16_UINT, - eR16Sint = VK_FORMAT_R16_SINT, - eR16Sfloat = VK_FORMAT_R16_SFLOAT, - eR16G16Unorm = VK_FORMAT_R16G16_UNORM, - eR16G16Snorm = VK_FORMAT_R16G16_SNORM, - eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, - eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, - eR16G16Uint = VK_FORMAT_R16G16_UINT, - eR16G16Sint = VK_FORMAT_R16G16_SINT, - eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, - eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, - eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, - eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, - eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, - eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, - eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, - eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, - eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, - eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, - eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, - eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, - eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, - eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, - eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, - eR32Uint = VK_FORMAT_R32_UINT, - eR32Sint = VK_FORMAT_R32_SINT, - eR32Sfloat = VK_FORMAT_R32_SFLOAT, - eR32G32Uint = VK_FORMAT_R32G32_UINT, - eR32G32Sint = VK_FORMAT_R32G32_SINT, - eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, - eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, - eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, - eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, - eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, - eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, - eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, - eR64Uint = VK_FORMAT_R64_UINT, - eR64Sint = VK_FORMAT_R64_SINT, - eR64Sfloat = VK_FORMAT_R64_SFLOAT, - eR64G64Uint = VK_FORMAT_R64G64_UINT, - eR64G64Sint = VK_FORMAT_R64G64_SINT, - eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, - eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, - eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, - 
eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, - eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, - eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, - eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, - eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, - eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, - eD16Unorm = VK_FORMAT_D16_UNORM, - eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, - eD32Sfloat = VK_FORMAT_D32_SFLOAT, - eS8Uint = VK_FORMAT_S8_UINT, - eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, - eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, - eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, - eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, - eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, - eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, - eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, - eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, - eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, - eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, - eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, - eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, - eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, - eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, - eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, - eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, - eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, - eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, - eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, - eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, - eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, - eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, - eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, - eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, - eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, - eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, - eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, - eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, - eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, - eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, - eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, - eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, - eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, - eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, - eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, - eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, - eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, - eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, - eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, - eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, - eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, - eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, - eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, - eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, - eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, - eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, - eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, - eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, - eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, - eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, - eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, - eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, - eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, - eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, - eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, - eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, - eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - 
eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, - eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, - eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, - eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, - eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, - eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, - eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, - eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, - eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, - eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, - eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, - eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, - eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, - eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, - eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, - eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, - eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, - eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, - eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, - eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, - eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, - eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, - eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, - eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, - eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, - eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, - eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, - eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, - eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, - eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, - eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, - eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, - eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, - eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, - ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, - ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, - ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, - ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, - ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, - ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, - ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, - ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, - eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, - eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, - eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, - eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, - eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, - eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, - eAstc8x6SfloatBlockEXT = 
VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, - eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, - eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, - eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, - eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, - eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, - eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, - eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, - eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, - eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, - eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, - eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, - eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, - eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, - eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, - eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, - eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, - eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, - eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, - eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, - eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, - eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, - eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, - eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, - eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, - eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, - eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, - eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, - eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, - eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, - eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, - eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, - eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, - eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, - eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, - eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, - eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, - eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, - eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, - eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, - eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, - eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( Format value ) - { - switch ( value ) - { - case Format::eUndefined : return "Undefined"; - case Format::eR4G4UnormPack8 : return "R4G4UnormPack8"; - case 
Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16"; - case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16"; - case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16"; - case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16"; - case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16"; - case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16"; - case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16"; - case Format::eR8Unorm : return "R8Unorm"; - case Format::eR8Snorm : return "R8Snorm"; - case Format::eR8Uscaled : return "R8Uscaled"; - case Format::eR8Sscaled : return "R8Sscaled"; - case Format::eR8Uint : return "R8Uint"; - case Format::eR8Sint : return "R8Sint"; - case Format::eR8Srgb : return "R8Srgb"; - case Format::eR8G8Unorm : return "R8G8Unorm"; - case Format::eR8G8Snorm : return "R8G8Snorm"; - case Format::eR8G8Uscaled : return "R8G8Uscaled"; - case Format::eR8G8Sscaled : return "R8G8Sscaled"; - case Format::eR8G8Uint : return "R8G8Uint"; - case Format::eR8G8Sint : return "R8G8Sint"; - case Format::eR8G8Srgb : return "R8G8Srgb"; - case Format::eR8G8B8Unorm : return "R8G8B8Unorm"; - case Format::eR8G8B8Snorm : return "R8G8B8Snorm"; - case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled"; - case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled"; - case Format::eR8G8B8Uint : return "R8G8B8Uint"; - case Format::eR8G8B8Sint : return "R8G8B8Sint"; - case Format::eR8G8B8Srgb : return "R8G8B8Srgb"; - case Format::eB8G8R8Unorm : return "B8G8R8Unorm"; - case Format::eB8G8R8Snorm : return "B8G8R8Snorm"; - case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled"; - case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled"; - case Format::eB8G8R8Uint : return "B8G8R8Uint"; - case Format::eB8G8R8Sint : return "B8G8R8Sint"; - case Format::eB8G8R8Srgb : return "B8G8R8Srgb"; - case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm"; - case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm"; - case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled"; - case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled"; - case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint"; - case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint"; - case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb"; - case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm"; - case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm"; - case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled"; - case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled"; - case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint"; - case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint"; - case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb"; - case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32"; - case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32"; - case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32"; - case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32"; - case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32"; - case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32"; - case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32"; - case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32"; - case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32"; - case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32"; - case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32"; - case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32"; - case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32"; 
- case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32"; - case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32"; - case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32"; - case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32"; - case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32"; - case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32"; - case Format::eR16Unorm : return "R16Unorm"; - case Format::eR16Snorm : return "R16Snorm"; - case Format::eR16Uscaled : return "R16Uscaled"; - case Format::eR16Sscaled : return "R16Sscaled"; - case Format::eR16Uint : return "R16Uint"; - case Format::eR16Sint : return "R16Sint"; - case Format::eR16Sfloat : return "R16Sfloat"; - case Format::eR16G16Unorm : return "R16G16Unorm"; - case Format::eR16G16Snorm : return "R16G16Snorm"; - case Format::eR16G16Uscaled : return "R16G16Uscaled"; - case Format::eR16G16Sscaled : return "R16G16Sscaled"; - case Format::eR16G16Uint : return "R16G16Uint"; - case Format::eR16G16Sint : return "R16G16Sint"; - case Format::eR16G16Sfloat : return "R16G16Sfloat"; - case Format::eR16G16B16Unorm : return "R16G16B16Unorm"; - case Format::eR16G16B16Snorm : return "R16G16B16Snorm"; - case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled"; - case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled"; - case Format::eR16G16B16Uint : return "R16G16B16Uint"; - case Format::eR16G16B16Sint : return "R16G16B16Sint"; - case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat"; - case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm"; - case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm"; - case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled"; - case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled"; - case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint"; - case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint"; - case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat"; - case Format::eR32Uint : return "R32Uint"; - case Format::eR32Sint : return "R32Sint"; - case Format::eR32Sfloat : return "R32Sfloat"; - case Format::eR32G32Uint : return "R32G32Uint"; - case Format::eR32G32Sint : return "R32G32Sint"; - case Format::eR32G32Sfloat : return "R32G32Sfloat"; - case Format::eR32G32B32Uint : return "R32G32B32Uint"; - case Format::eR32G32B32Sint : return "R32G32B32Sint"; - case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat"; - case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint"; - case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint"; - case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat"; - case Format::eR64Uint : return "R64Uint"; - case Format::eR64Sint : return "R64Sint"; - case Format::eR64Sfloat : return "R64Sfloat"; - case Format::eR64G64Uint : return "R64G64Uint"; - case Format::eR64G64Sint : return "R64G64Sint"; - case Format::eR64G64Sfloat : return "R64G64Sfloat"; - case Format::eR64G64B64Uint : return "R64G64B64Uint"; - case Format::eR64G64B64Sint : return "R64G64B64Sint"; - case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat"; - case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint"; - case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint"; - case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat"; - case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32"; - case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32"; - case Format::eD16Unorm : return "D16Unorm"; - case Format::eX8D24UnormPack32 : return 
"X8D24UnormPack32"; - case Format::eD32Sfloat : return "D32Sfloat"; - case Format::eS8Uint : return "S8Uint"; - case Format::eD16UnormS8Uint : return "D16UnormS8Uint"; - case Format::eD24UnormS8Uint : return "D24UnormS8Uint"; - case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint"; - case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock"; - case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock"; - case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock"; - case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock"; - case Format::eBc2UnormBlock : return "Bc2UnormBlock"; - case Format::eBc2SrgbBlock : return "Bc2SrgbBlock"; - case Format::eBc3UnormBlock : return "Bc3UnormBlock"; - case Format::eBc3SrgbBlock : return "Bc3SrgbBlock"; - case Format::eBc4UnormBlock : return "Bc4UnormBlock"; - case Format::eBc4SnormBlock : return "Bc4SnormBlock"; - case Format::eBc5UnormBlock : return "Bc5UnormBlock"; - case Format::eBc5SnormBlock : return "Bc5SnormBlock"; - case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock"; - case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock"; - case Format::eBc7UnormBlock : return "Bc7UnormBlock"; - case Format::eBc7SrgbBlock : return "Bc7SrgbBlock"; - case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock"; - case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock"; - case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock"; - case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock"; - case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock"; - case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock"; - case Format::eEacR11UnormBlock : return "EacR11UnormBlock"; - case Format::eEacR11SnormBlock : return "EacR11SnormBlock"; - case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock"; - case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock"; - case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock"; - case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock"; - case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock"; - case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock"; - case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock"; - case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock"; - case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock"; - case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock"; - case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock"; - case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock"; - case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock"; - case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock"; - case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock"; - case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock"; - case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock"; - case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock"; - case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock"; - case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock"; - case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock"; - case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock"; - case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock"; - case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock"; - case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock"; - case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock"; - case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock"; - case Format::eAstc12x10SrgbBlock : return 
"Astc12x10SrgbBlock"; - case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock"; - case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock"; - case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm"; - case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm"; - case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm"; - case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm"; - case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm"; - case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm"; - case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm"; - case Format::eR10X6UnormPack16 : return "R10X6UnormPack16"; - case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16"; - case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16"; - case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; - case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; - case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16"; - case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16"; - case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16"; - case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16"; - case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16"; - case Format::eR12X4UnormPack16 : return "R12X4UnormPack16"; - case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16"; - case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16"; - case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; - case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; - case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16"; - case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16"; - case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16"; - case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16"; - case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16"; - case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm"; - case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm"; - case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm"; - case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm"; - case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm"; - case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm"; - case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm"; - case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG"; - case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG"; - case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG"; - case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG"; - case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG"; - case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG"; - case Format::ePvrtc22BppSrgbBlockIMG : return 
"Pvrtc22BppSrgbBlockIMG"; - case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG"; - case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT"; - case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT"; - case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT"; - case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT"; - case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT"; - case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT"; - case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT"; - case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT"; - case Format::eAstc10x5SfloatBlockEXT : return "Astc10x5SfloatBlockEXT"; - case Format::eAstc10x6SfloatBlockEXT : return "Astc10x6SfloatBlockEXT"; - case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT"; - case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT"; - case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT"; - case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT"; - default: return "invalid"; + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); } - } - enum class FrontFace - { - eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, - eClockwise = VK_FRONT_FACE_CLOCKWISE - }; - - VULKAN_HPP_INLINE std::string to_string( FrontFace value ) - { - switch ( value ) + void vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT { - case FrontFace::eCounterClockwise : return "CounterClockwise"; - case FrontFace::eClockwise : return "Clockwise"; - default: return "invalid"; + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); } - } -#ifdef VK_USE_PLATFORM_WIN32_KHR - enum class FullScreenExclusiveEXT - { - eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, - eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, - eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, - eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT - }; + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } - VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value ) - { - switch ( value ) + VkResult vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT { - case FullScreenExclusiveEXT::eDefault : return "Default"; - case FullScreenExclusiveEXT::eAllowed : return "Allowed"; - case FullScreenExclusiveEXT::eDisallowed : 
return "Disallowed"; - case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled"; - default: return "invalid"; + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); } - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - enum class GeometryTypeNV - { - eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV, - eAabbs = VK_GEOMETRY_TYPE_AABBS_NV - }; + VkResult + vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } - VULKAN_HPP_INLINE std::string to_string( GeometryTypeNV value ) - { - switch ( value ) + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT { - case GeometryTypeNV::eTriangles : return "Triangles"; - case GeometryTypeNV::eAabbs : return "Aabbs"; - default: return "invalid"; + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); } - } - enum class ImageLayout - { - eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, - eGeneral = VK_IMAGE_LAYOUT_GENERAL, - eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, - eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, - eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, - eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, - ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, - eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, - eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, - ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, - eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, - eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, - eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, - eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, - eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( ImageLayout value ) - { - switch ( value ) - { - case ImageLayout::eUndefined : return "Undefined"; - case ImageLayout::eGeneral : return "General"; - case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal"; - case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal"; - case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal"; - case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal"; - case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal"; - case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal"; - case ImageLayout::ePreinitialized : return "Preinitialized"; - case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal"; - case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal"; - case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR"; - case ImageLayout::eSharedPresentKHR : return 
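A short sketch of recording an acceleration-structure build through the vkCmdBuildAccelerationStructuresKHR wrapper added above. The helper name is illustrative; 'build_info' is assumed to be a fully populated VkAccelerationStructureBuildGeometryInfoKHR with dstAccelerationStructure and scratchData already set, and the extension entry point is assumed to resolve.

    #include <vulkan/vulkan.h>

    void RecordBlasBuild(VkCommandBuffer cmd,
                         const VkAccelerationStructureBuildGeometryInfoKHR& build_info,
                         uint32_t primitive_count)
    {
      // One range per geometry in build_info; here a single geometry.
      VkAccelerationStructureBuildRangeInfoKHR range{};
      range.primitiveCount = primitive_count;

      const VkAccelerationStructureBuildRangeInfoKHR* ranges[] = {&range};
      vkCmdBuildAccelerationStructuresKHR(cmd, 1, &build_info, ranges);
    }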
"SharedPresentKHR"; - case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV"; - case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT"; - default: return "invalid"; + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); } - } - enum class ImageTiling - { - eOptimal = VK_IMAGE_TILING_OPTIMAL, - eLinear = VK_IMAGE_TILING_LINEAR, - eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT - }; + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( + device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } - VULKAN_HPP_INLINE std::string to_string( ImageTiling value ) - { - switch ( value ) + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { - case ImageTiling::eOptimal : return "Optimal"; - case ImageTiling::eLinear : return "Linear"; - case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT"; - default: return "invalid"; + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); } - } - enum class ImageType - { - e1D = VK_IMAGE_TYPE_1D, - e2D = VK_IMAGE_TYPE_2D, - e3D = VK_IMAGE_TYPE_3D - }; + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } - VULKAN_HPP_INLINE std::string to_string( ImageType value ) - { - switch ( value ) + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const + VULKAN_HPP_NOEXCEPT { - case ImageType::e1D : return "1D"; - case ImageType::e2D : return "2D"; - case ImageType::e3D : return "3D"; - default: return "invalid"; + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); } - } - enum class ImageViewType - { - e1D = VK_IMAGE_VIEW_TYPE_1D, - e2D = VK_IMAGE_VIEW_TYPE_2D, - e3D = VK_IMAGE_VIEW_TYPE_3D, - eCube = VK_IMAGE_VIEW_TYPE_CUBE, - e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, - e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, - eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - }; + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } - VULKAN_HPP_INLINE std::string to_string( ImageViewType value ) - { - switch ( value ) + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { - case ImageViewType::e1D : return "1D"; - case ImageViewType::e2D : return "2D"; - case ImageViewType::e3D : return "3D"; - case 
ImageViewType::eCube : return "Cube"; - case ImageViewType::e1DArray : return "1DArray"; - case ImageViewType::e2DArray : return "2DArray"; - case ImageViewType::eCubeArray : return "CubeArray"; - default: return "invalid"; + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); } - } - enum class IndexType - { - eUint16 = VK_INDEX_TYPE_UINT16, - eUint32 = VK_INDEX_TYPE_UINT32, - eNoneNV = VK_INDEX_TYPE_NONE_NV, - eUint8EXT = VK_INDEX_TYPE_UINT8_EXT - }; + void vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } - VULKAN_HPP_INLINE std::string to_string( IndexType value ) - { - switch ( value ) + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const + VULKAN_HPP_NOEXCEPT { - case IndexType::eUint16 : return "Uint16"; - case IndexType::eUint32 : return "Uint32"; - case IndexType::eNoneNV : return "NoneNV"; - case IndexType::eUint8EXT : return "Uint8EXT"; - default: return "invalid"; + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); } - } - enum class IndirectCommandsTokenTypeNVX - { - ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, - eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, - eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, - ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, - eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, - eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, - eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - }; + //=== VK_KHR_sampler_ycbcr_conversion === - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNVX value ) - { - switch ( value ) + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT { - case IndirectCommandsTokenTypeNVX::ePipeline : return "Pipeline"; - case IndirectCommandsTokenTypeNVX::eDescriptorSet : return "DescriptorSet"; - case IndirectCommandsTokenTypeNVX::eIndexBuffer : return "IndexBuffer"; - case IndirectCommandsTokenTypeNVX::eVertexBuffer : return "VertexBuffer"; - case IndirectCommandsTokenTypeNVX::ePushConstant : return "PushConstant"; - case IndirectCommandsTokenTypeNVX::eDrawIndexed : return "DrawIndexed"; - case IndirectCommandsTokenTypeNVX::eDraw : return "Draw"; - case IndirectCommandsTokenTypeNVX::eDispatch : return "Dispatch"; - default: return "invalid"; + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); } - } - - enum class InternalAllocationType - { - eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - }; - VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value ) - { - switch ( value ) + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + 
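The vkGetAccelerationStructureBuildSizesKHR wrapper above is normally the first call in the build flow: query the required sizes, then create the acceleration structure object inside a backing buffer. A hypothetical sketch, assuming 'backing_buffer' was created with VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR and enough space:

    #include <vulkan/vulkan.h>

    VkAccelerationStructureKHR CreateAsForBuild(
        VkDevice device,
        const VkAccelerationStructureBuildGeometryInfoKHR& build_info,
        uint32_t max_primitive_count, VkBuffer backing_buffer)
    {
      VkAccelerationStructureBuildSizesInfoKHR sizes{};
      sizes.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR;
      vkGetAccelerationStructureBuildSizesKHR(
          device, VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, &build_info,
          &max_primitive_count, &sizes);

      VkAccelerationStructureCreateInfoKHR create_info{};
      create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR;
      create_info.buffer = backing_buffer;
      create_info.size = sizes.accelerationStructureSize;
      create_info.type = build_info.type;

      VkAccelerationStructureKHR as = VK_NULL_HANDLE;
      vkCreateAccelerationStructureKHR(device, &create_info, nullptr, &as);
      return as;
    }

The scratchSize and buildScratchSize reported in 'sizes' would then size the scratch buffer used by the build command shown earlier.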
VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - case InternalAllocationType::eExecutable : return "Executable"; - default: return "invalid"; + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); } - } - enum class LineRasterizationModeEXT - { - eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, - eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, - eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, - eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT - }; + //=== VK_KHR_bind_memory2 === - VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value ) - { - switch ( value ) + VkResult vkBindBufferMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { - case LineRasterizationModeEXT::eDefault : return "Default"; - case LineRasterizationModeEXT::eRectangular : return "Rectangular"; - case LineRasterizationModeEXT::eBresenham : return "Bresenham"; - case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth"; - default: return "invalid"; + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); } - } - enum class LogicOp - { - eClear = VK_LOGIC_OP_CLEAR, - eAnd = VK_LOGIC_OP_AND, - eAndReverse = VK_LOGIC_OP_AND_REVERSE, - eCopy = VK_LOGIC_OP_COPY, - eAndInverted = VK_LOGIC_OP_AND_INVERTED, - eNoOp = VK_LOGIC_OP_NO_OP, - eXor = VK_LOGIC_OP_XOR, - eOr = VK_LOGIC_OP_OR, - eNor = VK_LOGIC_OP_NOR, - eEquivalent = VK_LOGIC_OP_EQUIVALENT, - eInvert = VK_LOGIC_OP_INVERT, - eOrReverse = VK_LOGIC_OP_OR_REVERSE, - eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, - eOrInverted = VK_LOGIC_OP_OR_INVERTED, - eNand = VK_LOGIC_OP_NAND, - eSet = VK_LOGIC_OP_SET - }; - - VULKAN_HPP_INLINE std::string to_string( LogicOp value ) - { - switch ( value ) - { - case LogicOp::eClear : return "Clear"; - case LogicOp::eAnd : return "And"; - case LogicOp::eAndReverse : return "AndReverse"; - case LogicOp::eCopy : return "Copy"; - case LogicOp::eAndInverted : return "AndInverted"; - case LogicOp::eNoOp : return "NoOp"; - case LogicOp::eXor : return "Xor"; - case LogicOp::eOr : return "Or"; - case LogicOp::eNor : return "Nor"; - case LogicOp::eEquivalent : return "Equivalent"; - case LogicOp::eInvert : return "Invert"; - case LogicOp::eOrReverse : return "OrReverse"; - case LogicOp::eCopyInverted : return "CopyInverted"; - case LogicOp::eOrInverted : return "OrInverted"; - case LogicOp::eNand : return "Nand"; - case LogicOp::eSet : return "Set"; - default: return "invalid"; + VkResult vkBindImageMemory2KHR( VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); } - } - enum class MemoryOverallocationBehaviorAMD - { - eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, - eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, - eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD - }; + //=== VK_EXT_image_drm_format_modifier === - VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) - { - switch ( value ) + VkResult vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT { - case MemoryOverallocationBehaviorAMD::eDefault : return "Default"; - case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed"; - 
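The VK_KHR_bind_memory2 wrappers above take an array of bind-info structs instead of loose parameters, which is what pNext-chained bind extensions (for example dedicated allocation) build on. A minimal sketch with hypothetical handles:

    #include <vulkan/vulkan.h>

    // Bind previously allocated 'memory' to 'image' through the *2KHR path.
    VkResult BindImage(VkDevice device, VkImage image, VkDeviceMemory memory)
    {
      VkBindImageMemoryInfo bind{};
      bind.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
      bind.image = image;
      bind.memory = memory;
      bind.memoryOffset = 0;
      return vkBindImageMemory2KHR(device, 1, &bind);
    }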
case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed"; - default: return "invalid"; + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); } - } - enum class ObjectEntryTypeNVX - { - eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, - eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, - eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, - ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - }; + //=== VK_EXT_validation_cache === - VULKAN_HPP_INLINE std::string to_string( ObjectEntryTypeNVX value ) - { - switch ( value ) + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT { - case ObjectEntryTypeNVX::eDescriptorSet : return "DescriptorSet"; - case ObjectEntryTypeNVX::ePipeline : return "Pipeline"; - case ObjectEntryTypeNVX::eIndexBuffer : return "IndexBuffer"; - case ObjectEntryTypeNVX::eVertexBuffer : return "VertexBuffer"; - case ObjectEntryTypeNVX::ePushConstant : return "PushConstant"; - default: return "invalid"; + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); } - } - enum class ObjectType - { - eUnknown = VK_OBJECT_TYPE_UNKNOWN, - eInstance = VK_OBJECT_TYPE_INSTANCE, - ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, - eDevice = VK_OBJECT_TYPE_DEVICE, - eQueue = VK_OBJECT_TYPE_QUEUE, - eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, - eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, - eFence = VK_OBJECT_TYPE_FENCE, - eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, - eBuffer = VK_OBJECT_TYPE_BUFFER, - eImage = VK_OBJECT_TYPE_IMAGE, - eEvent = VK_OBJECT_TYPE_EVENT, - eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, - eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, - eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, - eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, - ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, - ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, - eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, - ePipeline = VK_OBJECT_TYPE_PIPELINE, - eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, - eSampler = VK_OBJECT_TYPE_SAMPLER, - eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, - eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, - eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, - eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, - eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, - eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, - eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, - eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, - eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, - eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, - eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, - eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX, - eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, - eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, - eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, - eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, - ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, - eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, - eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( ObjectType value ) - { - switch ( value ) - { - case 
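A small sketch of the VK_EXT_image_drm_format_modifier query wrapped above, as an application might use it after creating an image with VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT (for example before exporting it to a dma-buf). Names are illustrative.

    #include <vulkan/vulkan.h>
    #include <cstdint>

    uint64_t QueryChosenModifier(VkDevice device, VkImage image)
    {
      VkImageDrmFormatModifierPropertiesEXT props{};
      props.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
      vkGetImageDrmFormatModifierPropertiesEXT(device, image, &props);
      return props.drmFormatModifier;  // which modifier the driver picked
    }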
ObjectType::eUnknown : return "Unknown"; - case ObjectType::eInstance : return "Instance"; - case ObjectType::ePhysicalDevice : return "PhysicalDevice"; - case ObjectType::eDevice : return "Device"; - case ObjectType::eQueue : return "Queue"; - case ObjectType::eSemaphore : return "Semaphore"; - case ObjectType::eCommandBuffer : return "CommandBuffer"; - case ObjectType::eFence : return "Fence"; - case ObjectType::eDeviceMemory : return "DeviceMemory"; - case ObjectType::eBuffer : return "Buffer"; - case ObjectType::eImage : return "Image"; - case ObjectType::eEvent : return "Event"; - case ObjectType::eQueryPool : return "QueryPool"; - case ObjectType::eBufferView : return "BufferView"; - case ObjectType::eImageView : return "ImageView"; - case ObjectType::eShaderModule : return "ShaderModule"; - case ObjectType::ePipelineCache : return "PipelineCache"; - case ObjectType::ePipelineLayout : return "PipelineLayout"; - case ObjectType::eRenderPass : return "RenderPass"; - case ObjectType::ePipeline : return "Pipeline"; - case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout"; - case ObjectType::eSampler : return "Sampler"; - case ObjectType::eDescriptorPool : return "DescriptorPool"; - case ObjectType::eDescriptorSet : return "DescriptorSet"; - case ObjectType::eFramebuffer : return "Framebuffer"; - case ObjectType::eCommandPool : return "CommandPool"; - case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; - case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; - case ObjectType::eSurfaceKHR : return "SurfaceKHR"; - case ObjectType::eSwapchainKHR : return "SwapchainKHR"; - case ObjectType::eDisplayKHR : return "DisplayKHR"; - case ObjectType::eDisplayModeKHR : return "DisplayModeKHR"; - case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; - case ObjectType::eObjectTableNVX : return "ObjectTableNVX"; - case ObjectType::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; - case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT"; - case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT"; - case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV"; - case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL"; - default: return "invalid"; + void vkDestroyValidationCacheEXT( VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); } - } - enum class PerformanceConfigurationTypeINTEL - { - eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL - }; + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } - VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) - { - switch ( value ) + VkResult vkGetValidationCacheDataEXT( VkDevice device, + VkValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { - case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated"; - default: return "invalid"; + return ::vkGetValidationCacheDataEXT( device, 
validationCache, pDataSize, pData ); } - } - enum class PerformanceOverrideTypeINTEL - { - eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, - eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL - }; + //=== VK_NV_shading_rate_image === - VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value ) - { - switch ( value ) + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { - case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware"; - case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches"; - default: return "invalid"; + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); } - } - enum class PerformanceParameterTypeINTEL - { - eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, - eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL - }; + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( + commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } - VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value ) - { - switch ( value ) + void + vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT { - case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported"; - case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits"; - default: return "invalid"; + return ::vkCmdSetCoarseSampleOrderNV( + commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); } - } - enum class PerformanceValueTypeINTEL - { - eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, - eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, - eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, - eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, - eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL - }; + //=== VK_NV_ray_tracing === - VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value ) - { - switch ( value ) + VkResult + vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT { - case PerformanceValueTypeINTEL::eUint32 : return "Uint32"; - case PerformanceValueTypeINTEL::eUint64 : return "Uint64"; - case PerformanceValueTypeINTEL::eFloat : return "Float"; - case PerformanceValueTypeINTEL::eBool : return "Bool"; - case PerformanceValueTypeINTEL::eString : return "String"; - default: return "invalid"; + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); } - } - enum class PhysicalDeviceType - { - eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, - eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, - eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, - eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, - eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU - }; - - VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value ) - { - switch ( value ) + void 
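A hypothetical round trip for the VK_EXT_validation_cache entry points wrapped above: create a cache and read back its serialized blob so it can be persisted to disk, mirroring the pipeline-cache pattern. Assumes the extension is enabled and the entry points resolve.

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <vector>

    std::vector<uint8_t> SnapshotValidationCache(VkDevice device)
    {
      VkValidationCacheCreateInfoEXT create_info{};
      create_info.sType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
      // initialDataSize/pInitialData could seed the cache from a prior blob.

      VkValidationCacheEXT cache = VK_NULL_HANDLE;
      vkCreateValidationCacheEXT(device, &create_info, nullptr, &cache);

      size_t size = 0;
      vkGetValidationCacheDataEXT(device, cache, &size, nullptr);
      std::vector<uint8_t> blob(size);
      vkGetValidationCacheDataEXT(device, cache, &size, blob.data());

      vkDestroyValidationCacheEXT(device, cache, nullptr);
      return blob;
    }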
vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - case PhysicalDeviceType::eOther : return "Other"; - case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu"; - case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu"; - case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu"; - case PhysicalDeviceType::eCpu : return "Cpu"; - default: return "invalid"; + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); } - } - enum class PipelineBindPoint - { - eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, - eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, - eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV - }; + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } - VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) - { - switch ( value ) + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const + VULKAN_HPP_NOEXCEPT { - case PipelineBindPoint::eGraphics : return "Graphics"; - case PipelineBindPoint::eCompute : return "Compute"; - case PipelineBindPoint::eRayTracingNV : return "RayTracingNV"; - default: return "invalid"; + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); } - } - enum class PipelineCacheHeaderVersion - { - eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE - }; + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( + commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } - VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value ) - { - switch ( value ) + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT { - case PipelineCacheHeaderVersion::eOne : return "One"; - default: return "invalid"; + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); } - } - enum class PipelineExecutableStatisticFormatKHR - { - eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, - eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, - eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, - eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR - }; + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize 
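Unlike the KHR path, the VK_NV_ray_tracing wrappers above require the application to back each acceleration structure with explicitly bound device memory. A minimal sketch with hypothetical handles, assuming 'memory' was allocated from a type satisfying the reported requirements:

    #include <vulkan/vulkan.h>

    void BindNvAccelerationStructure(VkDevice device,
                                     VkAccelerationStructureNV as,
                                     VkDeviceMemory memory)
    {
      VkAccelerationStructureMemoryRequirementsInfoNV info{};
      info.sType =
          VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
      info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
      info.accelerationStructure = as;

      VkMemoryRequirements2KHR reqs{};
      reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
      vkGetAccelerationStructureMemoryRequirementsNV(device, &info, &reqs);

      VkBindAccelerationStructureMemoryInfoNV bind{};
      bind.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
      bind.accelerationStructure = as;
      bind.memory = memory;  // offset/size must satisfy reqs.memoryRequirements
      vkBindAccelerationStructureMemoryNV(device, 1, &bind);
    }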
callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } - VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) - { - switch ( value ) + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT { - case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32"; - case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64"; - case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64"; - case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64"; - default: return "invalid"; + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); } - } - enum class PointClippingBehavior - { - eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, - eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, - eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR, - eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR - }; + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } - VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value ) - { - switch ( value ) + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT { - case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes"; - case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly"; - default: return "invalid"; + return ::vkCompileDeferredNV( device, pipeline, shader ); } - } - enum class PolygonMode - { - eFill = VK_POLYGON_MODE_FILL, - eLine = VK_POLYGON_MODE_LINE, - ePoint = VK_POLYGON_MODE_POINT, - eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV - }; + //=== VK_KHR_maintenance3 === - VULKAN_HPP_INLINE std::string to_string( PolygonMode value ) - { - switch ( value ) + void vkGetDescriptorSetLayoutSupportKHR( 
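The vkGetRayTracingShaderGroupHandlesNV wrapper above is what feeds the shader binding table consumed by vkCmdTraceRaysNV. A hypothetical sketch; 'handle_size' is assumed to come from VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize.

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <vector>

    std::vector<uint8_t> GetGroupHandles(VkDevice device, VkPipeline rt_pipeline,
                                         uint32_t group_count,
                                         uint32_t handle_size)
    {
      std::vector<uint8_t> handles(static_cast<size_t>(group_count) * handle_size);
      vkGetRayTracingShaderGroupHandlesNV(device, rt_pipeline, 0 /* firstGroup */,
                                          group_count, handles.size(),
                                          handles.data());
      return handles;  // copied into the SBT buffer at the required stride
    }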
VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT { - case PolygonMode::eFill : return "Fill"; - case PolygonMode::eLine : return "Line"; - case PolygonMode::ePoint : return "Point"; - case PolygonMode::eFillRectangleNV : return "FillRectangleNV"; - default: return "invalid"; + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); } - } - enum class PresentModeKHR - { - eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, - eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, - eFifo = VK_PRESENT_MODE_FIFO_KHR, - eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, - eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR - }; + //=== VK_KHR_draw_indirect_count === - VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value ) - { - switch ( value ) + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - case PresentModeKHR::eImmediate : return "Immediate"; - case PresentModeKHR::eMailbox : return "Mailbox"; - case PresentModeKHR::eFifo : return "Fifo"; - case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed"; - case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh"; - case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh"; - default: return "invalid"; + return ::vkCmdDrawIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - } - enum class PrimitiveTopology - { - ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, - eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, - eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, - eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, - eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, - eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, - eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, - eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, - eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, - ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - }; - - VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value ) - { - switch ( value ) - { - case PrimitiveTopology::ePointList : return "PointList"; - case PrimitiveTopology::eLineList : return "LineList"; - case PrimitiveTopology::eLineStrip : return "LineStrip"; - case PrimitiveTopology::eTriangleList : return "TriangleList"; - case PrimitiveTopology::eTriangleStrip : return "TriangleStrip"; - case PrimitiveTopology::eTriangleFan : return "TriangleFan"; - case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency"; - case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency"; - case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency"; - case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency"; - case PrimitiveTopology::ePatchList : return "PatchList"; - default: return "invalid"; + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - } - enum class QueryPoolSamplingModeINTEL - { - eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL - }; + //=== VK_EXT_external_memory_host === - VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value ) - { - switch ( value ) + VkResult vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT { - case QueryPoolSamplingModeINTEL::eManual : return "Manual"; - default: return "invalid"; + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); } - } - enum class QueryType - { - eOcclusion = VK_QUERY_TYPE_OCCLUSION, - ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, - eTimestamp = VK_QUERY_TYPE_TIMESTAMP, - eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, - eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, - ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL - }; + //=== VK_AMD_buffer_marker === - VULKAN_HPP_INLINE std::string to_string( QueryType value ) - { - switch ( value ) + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT { - case QueryType::eOcclusion : return "Occlusion"; - case QueryType::ePipelineStatistics : return "PipelineStatistics"; - case QueryType::eTimestamp : return "Timestamp"; - case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT"; - case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV"; - case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL"; - default: return "invalid"; + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); } - } - enum class QueueGlobalPriorityEXT - { - eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, - eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, - eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, - eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - }; + //=== VK_EXT_calibrated_timestamps === - VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT { - case QueueGlobalPriorityEXT::eLow : return "Low"; - case QueueGlobalPriorityEXT::eMedium : return "Medium"; - case QueueGlobalPriorityEXT::eHigh : return "High"; - case QueueGlobalPriorityEXT::eRealtime : return "Realtime"; - default: return "invalid"; + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); } - } - - enum class RasterizationOrderAMD - { - eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD, - eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD - }; - VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value ) - { - switch ( value ) + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT { 
- case RasterizationOrderAMD::eStrict : return "Strict"; - case RasterizationOrderAMD::eRelaxed : return "Relaxed"; - default: return "invalid"; + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); } - } - enum class RayTracingShaderGroupTypeNV - { - eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, - eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV, - eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - }; + //=== VK_NV_mesh_shader === - VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeNV value ) - { - switch ( value ) + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT { - case RayTracingShaderGroupTypeNV::eGeneral : return "General"; - case RayTracingShaderGroupTypeNV::eTrianglesHitGroup : return "TrianglesHitGroup"; - case RayTracingShaderGroupTypeNV::eProceduralHitGroup : return "ProceduralHitGroup"; - default: return "invalid"; + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); } - } - enum class Result - { - eSuccess = VK_SUCCESS, - eNotReady = VK_NOT_READY, - eTimeout = VK_TIMEOUT, - eEventSet = VK_EVENT_SET, - eEventReset = VK_EVENT_RESET, - eIncomplete = VK_INCOMPLETE, - eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, - eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, - eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, - eErrorDeviceLost = VK_ERROR_DEVICE_LOST, - eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, - eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, - eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, - eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, - eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, - eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, - eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, - eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, - eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, - eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, - eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, - eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, - eSuboptimalKHR = VK_SUBOPTIMAL_KHR, - eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, - eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, - eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, - eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, - eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, - eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, - eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, - eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, - eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, - eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, - eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( Result value ) - { - switch ( value ) - { - case Result::eSuccess : return "Success"; - case Result::eNotReady : return "NotReady"; - case Result::eTimeout : return "Timeout"; - case Result::eEventSet : return "EventSet"; - case Result::eEventReset : return "EventReset"; - case Result::eIncomplete : return "Incomplete"; - case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory"; - case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory"; - case 
Result::eErrorInitializationFailed : return "ErrorInitializationFailed"; - case Result::eErrorDeviceLost : return "ErrorDeviceLost"; - case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed"; - case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent"; - case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent"; - case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent"; - case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver"; - case Result::eErrorTooManyObjects : return "ErrorTooManyObjects"; - case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported"; - case Result::eErrorFragmentedPool : return "ErrorFragmentedPool"; - case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory"; - case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle"; - case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR"; - case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR"; - case Result::eSuboptimalKHR : return "SuboptimalKHR"; - case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR"; - case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR"; - case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT"; - case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV"; - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; - case Result::eErrorFragmentationEXT : return "ErrorFragmentationEXT"; - case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT"; - case Result::eErrorInvalidDeviceAddressEXT : return "ErrorInvalidDeviceAddressEXT"; - case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT"; - default: return "invalid"; + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); } - } - enum class SamplerAddressMode - { - eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, - eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, - eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, - eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, - eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value ) - { - switch ( value ) + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - case SamplerAddressMode::eRepeat : return "Repeat"; - case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat"; - case SamplerAddressMode::eClampToEdge : return "ClampToEdge"; - case SamplerAddressMode::eClampToBorder : return "ClampToBorder"; - case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge"; - default: return "invalid"; + return ::vkCmdDrawMeshTasksIndirectCountNV( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - } - enum class SamplerMipmapMode - { - eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, - eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR - }; + //=== VK_NV_scissor_exclusive === - VULKAN_HPP_INLINE 
std::string to_string( SamplerMipmapMode value ) - { - switch ( value ) + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT { - case SamplerMipmapMode::eNearest : return "Nearest"; - case SamplerMipmapMode::eLinear : return "Linear"; - default: return "invalid"; + return ::vkCmdSetExclusiveScissorNV( + commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); } - } - enum class SamplerReductionModeEXT - { - eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, - eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT, - eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT - }; + //=== VK_NV_device_diagnostic_checkpoints === - VULKAN_HPP_INLINE std::string to_string( SamplerReductionModeEXT value ) - { - switch ( value ) + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT { - case SamplerReductionModeEXT::eWeightedAverage : return "WeightedAverage"; - case SamplerReductionModeEXT::eMin : return "Min"; - case SamplerReductionModeEXT::eMax : return "Max"; - default: return "invalid"; + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); } - } - - enum class SamplerYcbcrModelConversion - { - eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, - eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, - eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, - eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, - eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, - eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR, - eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR, - eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR, - eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR, - eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR - }; - VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value ) - { - switch ( value ) + void vkGetQueueCheckpointDataNV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT { - case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity"; - case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity"; - case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709"; - case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601"; - case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020"; - default: return "invalid"; + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); } - } - enum class SamplerYcbcrRange - { - eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, - eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, - eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR, - eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR - }; + //=== VK_KHR_timeline_semaphore === - VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value ) - { - switch ( value ) + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, + VkSemaphore semaphore, + uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT { - case SamplerYcbcrRange::eItuFull : return "ItuFull"; - case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow"; - default: return "invalid"; + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); } - } - enum class ScopeNV - { - eDevice = VK_SCOPE_DEVICE_NV, - eWorkgroup = 
VK_SCOPE_WORKGROUP_NV, - eSubgroup = VK_SCOPE_SUBGROUP_NV, - eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV - }; + VkResult vkWaitSemaphoresKHR( VkDevice device, + const VkSemaphoreWaitInfo * pWaitInfo, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } - VULKAN_HPP_INLINE std::string to_string( ScopeNV value ) - { - switch ( value ) + VkResult vkSignalSemaphoreKHR( VkDevice device, + const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT { - case ScopeNV::eDevice : return "Device"; - case ScopeNV::eWorkgroup : return "Workgroup"; - case ScopeNV::eSubgroup : return "Subgroup"; - case ScopeNV::eQueueFamily : return "QueueFamily"; - default: return "invalid"; + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); } - } - enum class ShaderFloatControlsIndependenceKHR - { - e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR, - eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR, - eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR - }; + //=== VK_INTEL_performance_query === - VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependenceKHR value ) - { - switch ( value ) + VkResult vkInitializePerformanceApiINTEL( + VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT { - case ShaderFloatControlsIndependenceKHR::e32BitOnly : return "32BitOnly"; - case ShaderFloatControlsIndependenceKHR::eAll : return "All"; - case ShaderFloatControlsIndependenceKHR::eNone : return "None"; - default: return "invalid"; + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); } - } - enum class ShaderInfoTypeAMD - { - eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, - eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, - eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - }; + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } - VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value ) - { - switch ( value ) + VkResult + vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { - case ShaderInfoTypeAMD::eStatistics : return "Statistics"; - case ShaderInfoTypeAMD::eBinary : return "Binary"; - case ShaderInfoTypeAMD::eDisassembly : return "Disassembly"; - default: return "invalid"; + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); } - } - enum class ShadingRatePaletteEntryNV - { - eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, - e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, - e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, - e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, - e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, - e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, - e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, - e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, - e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, - e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, - e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, - e1InvocationPer4X4Pixels = 
VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV - }; - - VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value ) - { - switch ( value ) - { - case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations"; - case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel"; - case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel"; - case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels"; - case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels"; - default: return "invalid"; + VkResult vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); } - } - enum class SharingMode - { - eExclusive = VK_SHARING_MODE_EXCLUSIVE, - eConcurrent = VK_SHARING_MODE_CONCURRENT - }; + VkResult + vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } - VULKAN_HPP_INLINE std::string to_string( SharingMode value ) - { - switch ( value ) + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const + VULKAN_HPP_NOEXCEPT { - case SharingMode::eExclusive : return "Exclusive"; - case SharingMode::eConcurrent : return "Concurrent"; - default: return "invalid"; + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); } - } - enum class StencilOp - { - eKeep = VK_STENCIL_OP_KEEP, - eZero = VK_STENCIL_OP_ZERO, - eReplace = VK_STENCIL_OP_REPLACE, - eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, - eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, - eInvert = VK_STENCIL_OP_INVERT, - eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, - eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP - }; + VkResult + vkReleasePerformanceConfigurationINTEL( VkDevice device, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } - VULKAN_HPP_INLINE std::string to_string( StencilOp value ) - { - switch ( value ) + VkResult + vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, + VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT { - case StencilOp::eKeep : return "Keep"; - case StencilOp::eZero : return "Zero"; - case StencilOp::eReplace : return "Replace"; - case StencilOp::eIncrementAndClamp : return "IncrementAndClamp"; - case StencilOp::eDecrementAndClamp : return 
"DecrementAndClamp"; - case StencilOp::eInvert : return "Invert"; - case StencilOp::eIncrementAndWrap : return "IncrementAndWrap"; - case StencilOp::eDecrementAndWrap : return "DecrementAndWrap"; - default: return "invalid"; + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); } - } - enum class StructureType - { - eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, - eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, - eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, - eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, - eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, - eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, - eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, - eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, - eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, - eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, - eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, - eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, - eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, - eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, - eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, - eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, - eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, - ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, - ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, - ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, - ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, - ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, - ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, - ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, - ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, - ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, - ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, - ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, - eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, - eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, - ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, - eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, - eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, - eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, - eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, - eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, - eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, - eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, - eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, - eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, - eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, - eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, - eCommandBufferBeginInfo = 
VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, - eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, - eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, - eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, - eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, - eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, - eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, - eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, - eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, - ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, - eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, - eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, - eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, - eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, - eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, - eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, - eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, - eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, - eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, - ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, - eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, - eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, - eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, - eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, - eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, - eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, - ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, - ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, - eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, - eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, - ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, - eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, - ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, - eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, - ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, - ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, - eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, - eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, - ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, - eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, - ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, - ePhysicalDeviceMultiviewProperties = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, - ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, - eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, - ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, - ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, - eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, - eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, - eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, - eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, - eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, - ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, - eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, - eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, - ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, - eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, - ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, - eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, - ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, - eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, - eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, - eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, - ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, - eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, - eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, - eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, - ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, - eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, - ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, - eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, - ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, - eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, - ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, - eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, - eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, - eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, - eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, - eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, - eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, - eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, - eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, - eDisplayPresentInfoKHR = 
VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, - eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, - eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, - eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, - eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, - eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, - eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, - ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, - eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, - eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, - eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, - eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, - eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, - eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, - ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, - ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, - ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, - eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, - eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, - eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, - ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, - eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, - eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, - eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, - eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, - eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, - eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, - eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, - ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, - eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, - ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, - eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, - eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, - eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, - eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, - eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, - eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, - eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, - eWin32KeyedMutexAcquireReleaseInfoKHR = 
VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, - eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, - eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, - eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, - eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, - eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, - eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, - ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, - eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, - ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, - eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, - ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, - ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, - eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, - eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, - eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, - eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, - eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, - eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, - ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, - eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, - eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, - eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, - eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, - eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, - ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, - ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, - ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, - ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, - ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, - ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, - ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, - eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, - ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, - eFramebufferAttachmentsCreateInfoKHR = 
VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, - eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, - eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, - eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, - eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, - eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, - eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, - eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, - eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, - eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, - eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, - eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, - eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, - eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, - eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, - eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, - ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, - eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, - eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, - eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, - eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, - eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, - eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, - eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, - eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, - eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, - eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, - eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, - eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, - eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, - eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, - eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, - eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, - eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, - eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, - eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, - eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, - ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, - eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, - ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, - ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, - 
eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, - eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, - eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, - eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, - ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, - ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, - eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, - eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, - ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, - ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, - ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, - ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, - ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, - ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, - ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, - eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, - eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, - ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, - eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, - eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, - eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, - eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, - eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, - eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, - ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, - ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, - eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, - eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, - ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, - ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, - ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, - ePipelineViewportCoarseSampleOrderStateCreateInfoNV = 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, - eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, - eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, - eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, - eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, - eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, - eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, - eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, - eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, - ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, - eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, - eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, - ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, - ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, - ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, - eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, - eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, - ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, - eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, - eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, - ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, - ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, - ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, - eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, - ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, - eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, - ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, - ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, - ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, - ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, - ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, - ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, - ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, - ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, - eSubpassDescriptionDepthStencilResolveKHR = 
VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, - ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, - ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, - ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, - ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, - ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, - ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, - ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, - eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, - eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, - ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, - eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, - eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, - ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, - ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, - ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, - ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, - ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, - ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, - eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, - eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, - eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, - eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, - ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, - ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, - eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, - ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, - ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, - ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, - ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, - ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, - ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, - ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, - ePhysicalDeviceMemoryPriorityFeaturesEXT 
= VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, - eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, - eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, - ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, - ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, - eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, - eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, - eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, - eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, - ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, - eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, - ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, - ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, - ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, - eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, - ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, - ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, - ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, - eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, - eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, - eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, - eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, - ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, - ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, - ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, - ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, - ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, - ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, - ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, - ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, - ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, - ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, - ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, - ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, - ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, - ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, - eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, - eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, - ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, - ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, - ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, - ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, - eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, - eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, - ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, - eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, - ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, - eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, - ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, - eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, - eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, - eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, - eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, - eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, - eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, - eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, - ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, - eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, - ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, - eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, - ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, - eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, - ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, - eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, - eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, - eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, - ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, - eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, - eExportSemaphoreCreateInfoKHR = 
VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, - ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, - ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, - eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, - ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, - eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, - eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, - ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, - eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, - eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, - ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, - ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, - ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, - eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, - eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, - eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, - eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, - eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, - eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, - eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, - eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, - ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, - eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, - eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, - eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, - ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, - eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, - ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( StructureType value ) - { - switch ( value ) - { - case StructureType::eApplicationInfo : return "ApplicationInfo"; - case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo"; - case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo"; - case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo"; - case StructureType::eSubmitInfo : return "SubmitInfo"; - case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo"; - case 
StructureType::eMappedMemoryRange : return "MappedMemoryRange"; - case StructureType::eBindSparseInfo : return "BindSparseInfo"; - case StructureType::eFenceCreateInfo : return "FenceCreateInfo"; - case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo"; - case StructureType::eEventCreateInfo : return "EventCreateInfo"; - case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo"; - case StructureType::eBufferCreateInfo : return "BufferCreateInfo"; - case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo"; - case StructureType::eImageCreateInfo : return "ImageCreateInfo"; - case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo"; - case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo"; - case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo"; - case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo"; - case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo"; - case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo"; - case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo"; - case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo"; - case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo"; - case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo"; - case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo"; - case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo"; - case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo"; - case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo"; - case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo"; - case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo"; - case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo"; - case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo"; - case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo"; - case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo"; - case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet"; - case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet"; - case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo"; - case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo"; - case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo"; - case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo"; - case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo"; - case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo"; - case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo"; - case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier"; - case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier"; - case StructureType::eMemoryBarrier : return "MemoryBarrier"; - case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo"; - case 
StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo"; - case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties"; - case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo"; - case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo"; - case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures"; - case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements"; - case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo"; - case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo"; - case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo"; - case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo"; - case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo"; - case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo"; - case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo"; - case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo"; - case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties"; - case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo"; - case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2"; - case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2"; - case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2"; - case StructureType::eMemoryRequirements2 : return "MemoryRequirements2"; - case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2"; - case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2"; - case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2"; - case StructureType::eFormatProperties2 : return "FormatProperties2"; - case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2"; - case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2"; - case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2"; - case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2"; - case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2"; - case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2"; - case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties"; - case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo"; - case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo"; - case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo"; - case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo"; - case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures"; - case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties"; - case StructureType::ePhysicalDeviceVariablePointersFeatures : return 
"PhysicalDeviceVariablePointersFeatures"; - case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo"; - case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures"; - case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties"; - case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2"; - case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo"; - case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo"; - case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo"; - case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo"; - case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures"; - case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties"; - case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo"; - case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo"; - case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties"; - case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo"; - case StructureType::eExternalBufferProperties : return "ExternalBufferProperties"; - case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties"; - case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo"; - case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo"; - case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo"; - case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo"; - case StructureType::eExternalFenceProperties : return "ExternalFenceProperties"; - case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo"; - case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo"; - case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo"; - case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties"; - case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties"; - case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport"; - case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures"; - case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR"; - case StructureType::ePresentInfoKHR : return "PresentInfoKHR"; - case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR"; - case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR"; - case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR"; - case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR"; - case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR"; - case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR"; - case StructureType::eDisplayModeCreateInfoKHR : return 
"DisplayModeCreateInfoKHR"; - case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR"; - case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR"; - case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR"; - case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR"; - case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR"; - case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR"; - case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR"; - case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT"; - case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD"; - case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT"; - case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT"; - case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT"; - case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV"; - case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV"; - case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV"; - case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT"; - case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT"; - case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT"; - case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX"; - case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD"; - case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP"; - case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV"; - case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV"; - case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV"; - case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV"; - case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV"; - case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV"; - case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT"; - case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN"; - case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT"; - case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT"; - case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT"; - case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR"; - case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR"; - case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR"; - case 
StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR"; - case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR"; - case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR"; - case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR"; - case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR"; - case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR"; - case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR"; - case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR"; - case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR"; - case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR"; - case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR"; - case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR"; - case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT"; - case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT"; - case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT"; - case StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR : return "PhysicalDeviceShaderFloat16Int8FeaturesKHR"; - case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR"; - case StructureType::eObjectTableCreateInfoNVX : return "ObjectTableCreateInfoNVX"; - case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX"; - case StructureType::eCmdProcessCommandsInfoNVX : return "CmdProcessCommandsInfoNVX"; - case StructureType::eCmdReserveSpaceForCommandsInfoNVX : return "CmdReserveSpaceForCommandsInfoNVX"; - case StructureType::eDeviceGeneratedCommandsLimitsNVX : return "DeviceGeneratedCommandsLimitsNVX"; - case StructureType::eDeviceGeneratedCommandsFeaturesNVX : return "DeviceGeneratedCommandsFeaturesNVX"; - case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV"; - case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT"; - case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT"; - case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT"; - case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT"; - case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT"; - case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE"; - case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; - case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV"; - case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT"; - case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; - case 
StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT"; - case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT"; - case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT"; - case StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR : return "PhysicalDeviceImagelessFramebufferFeaturesKHR"; - case StructureType::eFramebufferAttachmentsCreateInfoKHR : return "FramebufferAttachmentsCreateInfoKHR"; - case StructureType::eFramebufferAttachmentImageInfoKHR : return "FramebufferAttachmentImageInfoKHR"; - case StructureType::eRenderPassAttachmentBeginInfoKHR : return "RenderPassAttachmentBeginInfoKHR"; - case StructureType::eAttachmentDescription2KHR : return "AttachmentDescription2KHR"; - case StructureType::eAttachmentReference2KHR : return "AttachmentReference2KHR"; - case StructureType::eSubpassDescription2KHR : return "SubpassDescription2KHR"; - case StructureType::eSubpassDependency2KHR : return "SubpassDependency2KHR"; - case StructureType::eRenderPassCreateInfo2KHR : return "RenderPassCreateInfo2KHR"; - case StructureType::eSubpassBeginInfoKHR : return "SubpassBeginInfoKHR"; - case StructureType::eSubpassEndInfoKHR : return "SubpassEndInfoKHR"; - case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR"; - case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR"; - case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR"; - case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR"; - case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR"; - case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR"; - case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR"; - case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR"; - case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR"; - case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR"; - case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR"; - case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR"; - case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR"; - case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR"; - case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK"; - case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK"; - case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT"; - case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT"; - case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT"; - case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT"; - case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT"; - case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID"; - case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID"; - case 
StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID"; - case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID"; - case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID"; - case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID"; - case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT : return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT"; - case StructureType::eSamplerReductionModeCreateInfoEXT : return "SamplerReductionModeCreateInfoEXT"; - case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; - case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; - case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT"; - case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; - case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT"; - case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT"; - case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT"; - case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT"; - case StructureType::eImageFormatListCreateInfoKHR : return "ImageFormatListCreateInfoKHR"; - case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; - case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; - case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT"; - case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV"; - case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; - case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; - case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT"; - case StructureType::eDrmFormatModifierPropertiesEXT : return "DrmFormatModifierPropertiesEXT"; - case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; - case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT"; - case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return "ImageDrmFormatModifierExplicitCreateInfoEXT"; - case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT"; - case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT"; - case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT"; - case 
StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT : return "DescriptorSetLayoutBindingFlagsCreateInfoEXT"; - case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT : return "PhysicalDeviceDescriptorIndexingFeaturesEXT"; - case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT : return "PhysicalDeviceDescriptorIndexingPropertiesEXT"; - case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT : return "DescriptorSetVariableDescriptorCountAllocateInfoEXT"; - case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT : return "DescriptorSetVariableDescriptorCountLayoutSupportEXT"; - case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; - case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; - case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; - case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; - case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV"; - case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV"; - case StructureType::eGeometryNV : return "GeometryNV"; - case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; - case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; - case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV"; - case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV"; - case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; - case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; - case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; - case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV"; - case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; - case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; - case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT"; - case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT"; - case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT"; - case StructureType::ePhysicalDevice8BitStorageFeaturesKHR : return "PhysicalDevice8BitStorageFeaturesKHR"; - case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT"; - case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT"; - case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; - case StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR : return "PhysicalDeviceShaderAtomicInt64FeaturesKHR"; - case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD"; - case 
StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT"; - case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD"; - case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; - case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; - case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP"; - case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT"; - case StructureType::ePhysicalDeviceDriverPropertiesKHR : return "PhysicalDeviceDriverPropertiesKHR"; - case StructureType::ePhysicalDeviceFloatControlsPropertiesKHR : return "PhysicalDeviceFloatControlsPropertiesKHR"; - case StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR : return "PhysicalDeviceDepthStencilResolvePropertiesKHR"; - case StructureType::eSubpassDescriptionDepthStencilResolveKHR : return "SubpassDescriptionDepthStencilResolveKHR"; - case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; - case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV"; - case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV"; - case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; - case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV"; - case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV"; - case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV"; - case StructureType::eCheckpointDataNV : return "CheckpointDataNV"; - case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV"; - case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; - case StructureType::eQueryPoolCreateInfoINTEL : return "QueryPoolCreateInfoINTEL"; - case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL"; - case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL"; - case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL"; - case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL"; - case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL"; - case StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR : return "PhysicalDeviceVulkanMemoryModelFeaturesKHR"; - case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT"; - case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD"; - case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return 
"SwapchainDisplayNativeHdrCreateInfoAMD"; - case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA"; - case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; - case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; - case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT"; - case StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT : return "PhysicalDeviceScalarBlockLayoutFeaturesEXT"; - case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; - case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; - case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; - case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD"; - case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD"; - case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT"; - case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT"; - case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT"; - case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR"; - case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; - case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; - case StructureType::eBufferDeviceAddressInfoEXT : return "BufferDeviceAddressInfoEXT"; - case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT"; - case StructureType::eImageStencilUsageCreateInfoEXT : return "ImageStencilUsageCreateInfoEXT"; - case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT"; - case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV"; - case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV"; - case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV"; - case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV"; - case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV"; - case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV"; - case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; - case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; - case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR : return "PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR"; - case StructureType::eSurfaceFullScreenExclusiveInfoEXT : 
return "SurfaceFullScreenExclusiveInfoEXT"; - case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT"; - case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT"; - case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT"; - case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT"; - case StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT : return "PhysicalDeviceHostQueryResetFeaturesEXT"; - case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; - case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; - case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR"; - case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR"; - case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR"; - case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR"; - case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR"; - case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; - case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; - case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; - default: return "invalid"; + VkResult vkGetPerformanceParameterINTEL( VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); } - } - enum class SubpassContents - { - eInline = VK_SUBPASS_CONTENTS_INLINE, - eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - }; + //=== VK_AMD_display_native_hdr === - VULKAN_HPP_INLINE std::string to_string( SubpassContents value ) - { - switch ( value ) + void vkSetLocalDimmingAMD( VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT { - case SubpassContents::eInline : return "Inline"; - case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers"; - default: return "invalid"; + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); } - } - enum class SystemAllocationScope - { - eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, - eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, - eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, - eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - }; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === - VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value ) - { - switch ( value ) + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * 
pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - case SystemAllocationScope::eCommand : return "Command"; - case SystemAllocationScope::eObject : return "Object"; - case SystemAllocationScope::eCache : return "Cache"; - case SystemAllocationScope::eDevice : return "Device"; - case SystemAllocationScope::eInstance : return "Instance"; - default: return "invalid"; + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); } - } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - enum class TessellationDomainOrigin - { - eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, - eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, - eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR, - eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR - }; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === - VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value ) - { - switch ( value ) + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - case TessellationDomainOrigin::eUpperLeft : return "UpperLeft"; - case TessellationDomainOrigin::eLowerLeft : return "LowerLeft"; - default: return "invalid"; + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); } - } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ - enum class TimeDomainEXT - { - eDevice = VK_TIME_DOMAIN_DEVICE_EXT, - eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, - eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, - eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - }; + //=== VK_KHR_fragment_shading_rate === - VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT { - case TimeDomainEXT::eDevice : return "Device"; - case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic"; - case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw"; - case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter"; - default: return "invalid"; + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( + physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); } - } - enum class ValidationCacheHeaderVersionEXT - { - eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value ) - { - switch ( value ) + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const + VULKAN_HPP_NOEXCEPT { - case ValidationCacheHeaderVersionEXT::eOne : return "One"; - default: return "invalid"; + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); } - } - enum class ValidationCheckEXT - { - eAll = VK_VALIDATION_CHECK_ALL_EXT, - eShaders = VK_VALIDATION_CHECK_SHADERS_EXT - }; + //=== VK_EXT_buffer_device_address === - VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value ) - { - switch ( value ) + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { - case ValidationCheckEXT::eAll : 
return "All"; - case ValidationCheckEXT::eShaders : return "Shaders"; - default: return "invalid"; + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); } - } - enum class ValidationFeatureDisableEXT - { - eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, - eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, - eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, - eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, - eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, - eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, - eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - }; + //=== VK_EXT_tooling_info === - VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value ) - { - switch ( value ) + VkResult + vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT { - case ValidationFeatureDisableEXT::eAll : return "All"; - case ValidationFeatureDisableEXT::eShaders : return "Shaders"; - case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety"; - case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters"; - case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes"; - case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks"; - case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles"; - default: return "invalid"; + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); } - } - enum class ValidationFeatureEnableEXT - { - eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, - eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, - eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - }; + //=== VK_KHR_present_wait === - VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) - { - switch ( value ) + VkResult vkWaitForPresentKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { - case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted"; - case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot"; - case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices"; - default: return "invalid"; + return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); } - } - enum class VendorId - { - eVIV = VK_VENDOR_ID_VIV, - eVSI = VK_VENDOR_ID_VSI, - eKazan = VK_VENDOR_ID_KAZAN - }; + //=== VK_NV_cooperative_matrix === - VULKAN_HPP_INLINE std::string to_string( VendorId value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const + VULKAN_HPP_NOEXCEPT { - case VendorId::eVIV : return "VIV"; - case VendorId::eVSI : return "VSI"; - case VendorId::eKazan : return "Kazan"; - default: return "invalid"; + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); } - } - enum class VertexInputRate - { - eVertex = VK_VERTEX_INPUT_RATE_VERTEX, - eInstance = VK_VERTEX_INPUT_RATE_INSTANCE - }; + //=== VK_NV_coverage_reduction_mode === - VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) - { - switch ( value ) + VkResult 
vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t * pCombinationCount, + VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT { - case VertexInputRate::eVertex : return "Vertex"; - case VertexInputRate::eInstance : return "Instance"; - default: return "invalid"; + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + physicalDevice, pCombinationCount, pCombinations ); } - } - enum class ViewportCoordinateSwizzleNV - { - ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, - eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, - ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, - eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, - ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, - eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, - ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, - eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - }; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === - VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value ) - { - switch ( value ) + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT { - case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX"; - case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX"; - case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY"; - case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY"; - case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ"; - case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ"; - case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW"; - case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW"; - default: return "invalid"; + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( + physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); } - } - template - struct cpp_type - { - }; - - enum class AccessFlagBits - { - eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, - eIndexRead = VK_ACCESS_INDEX_READ_BIT, - eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, - eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, - eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, - eShaderRead = VK_ACCESS_SHADER_READ_BIT, - eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, - eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, - eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, - eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, - eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, - eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, - eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, - eHostRead = VK_ACCESS_HOST_READ_BIT, - eHostWrite = VK_ACCESS_HOST_WRITE_BIT, - eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, - eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, - eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, - eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, - eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, - eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, - eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX, - 
eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX, - eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, - eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, - eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) - { - switch ( value ) - { - case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead"; - case AccessFlagBits::eIndexRead : return "IndexRead"; - case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead"; - case AccessFlagBits::eUniformRead : return "UniformRead"; - case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead"; - case AccessFlagBits::eShaderRead : return "ShaderRead"; - case AccessFlagBits::eShaderWrite : return "ShaderWrite"; - case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead"; - case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite"; - case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead"; - case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite"; - case AccessFlagBits::eTransferRead : return "TransferRead"; - case AccessFlagBits::eTransferWrite : return "TransferWrite"; - case AccessFlagBits::eHostRead : return "HostRead"; - case AccessFlagBits::eHostWrite : return "HostWrite"; - case AccessFlagBits::eMemoryRead : return "MemoryRead"; - case AccessFlagBits::eMemoryWrite : return "MemoryWrite"; - case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT"; - case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT"; - case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT"; - case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT"; - case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX"; - case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX"; - case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT"; - case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV"; - case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV"; - case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV"; - case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT"; - default: return "invalid"; + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); } - } - using AccessFlags = Flags; + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } - VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) - { - return AccessFlags( bit0 ) | bit1; - } + VkResult + vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits ) - { - return ~( AccessFlags( bits ) ); - } + //=== VK_EXT_headless_surface === - template <> struct FlagTraits - { - enum - { - allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | "; - if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | "; - if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | "; - if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | "; - if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | "; - if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | "; - if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | "; - if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | "; - if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | "; - if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | "; - if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | "; - if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | "; - if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | "; - if ( value & AccessFlagBits::eHostRead ) result += "HostRead | "; - if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | "; - if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | "; - if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | "; - if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | "; - if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | "; - if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | "; - if ( value & 
AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | "; - if ( value & AccessFlagBits::eCommandProcessReadNVX ) result += "CommandProcessReadNVX | "; - if ( value & AccessFlagBits::eCommandProcessWriteNVX ) result += "CommandProcessWriteNVX | "; - if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; - if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | "; - if ( value & AccessFlagBits::eAccelerationStructureReadNV ) result += "AccelerationStructureReadNV | "; - if ( value & AccessFlagBits::eAccelerationStructureWriteNV ) result += "AccelerationStructureWriteNV | "; - if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - enum class AndroidSurfaceCreateFlagBitsKHR - {}; + //=== VK_KHR_buffer_device_address === - VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) - { - return "(void)"; - } + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } - using AndroidSurfaceCreateFlagsKHR = Flags; + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, + const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } - VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) - { - return "{}"; - } -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } - enum class AttachmentDescriptionFlagBits - { - eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT - }; + //=== VK_EXT_line_rasterization === - VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) - { - switch ( value ) + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { - case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias"; - default: return "invalid"; + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); } - } - using AttachmentDescriptionFlags = Flags; + //=== VK_EXT_host_query_reset === - VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) - { - return AttachmentDescriptionFlags( bit0 ) | bit1; - } + void vkResetQueryPoolEXT( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } - VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) - { - return ~( AttachmentDescriptionFlags( bits ) ); - } + //=== VK_EXT_extended_dynamic_state === - template <> struct FlagTraits - 
{ - enum + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias) - }; - }; + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } - VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) - { - if ( !value ) return "{}"; - std::string result; + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } - if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } - enum class BufferCreateFlagBits - { - eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, - eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, - eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, - eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, - eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT - }; + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } - VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value ) - { - switch ( value ) + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { - case BufferCreateFlagBits::eSparseBinding : return "SparseBinding"; - case BufferCreateFlagBits::eSparseResidency : return "SparseResidency"; - case BufferCreateFlagBits::eSparseAliased : return "SparseAliased"; - case BufferCreateFlagBits::eProtected : return "Protected"; - case BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT : return "DeviceAddressCaptureReplayEXT"; - default: return "invalid"; + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); } - } - using BufferCreateFlags = Flags; + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( + commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } - VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) - { - return BufferCreateFlags( bit0 ) | bit1; - } + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } - VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits ) - { - return ~( BufferCreateFlags( bits ) ); - } + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } - template <> struct FlagTraits - { - enum + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp ) const 
VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT) - }; - }; + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } - VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } - if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | "; - if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | "; - if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | "; - if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT ) result += "DeviceAddressCaptureReplayEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } - enum class BufferUsageFlagBits - { - eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, - eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, - eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, - eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, - eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, - eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, - eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, - eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, - eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, - eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) - { - switch ( value ) - { - case BufferUsageFlagBits::eTransferSrc : return "TransferSrc"; - case BufferUsageFlagBits::eTransferDst : return "TransferDst"; - case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; - case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; - case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer"; - case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer"; - case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer"; - case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer"; - case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer"; - case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; - case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT"; - case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV"; - case BufferUsageFlagBits::eShaderDeviceAddressEXT : return "ShaderDeviceAddressEXT"; - 
default: return "invalid"; + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); } - } - using BufferUsageFlags = Flags; + //=== VK_KHR_deferred_host_operations === - VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) - { - return BufferUsageFlags( bit0 ) | bit1; - } + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } - VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits ) - { - return ~( BufferUsageFlags( bits ) ); - } + void vkDestroyDeferredOperationKHR( VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } - template <> struct FlagTraits - { - enum + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddressEXT) - }; - }; + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } - VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) - { - if ( !value ) return "{}"; - std::string result; + VkResult vkGetDeferredOperationResultKHR( VkDevice device, + VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } - if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | "; - if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | "; - if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | "; - if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | "; - if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | "; - if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | "; - if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | "; - if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | "; - if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | "; - if ( value & 
BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | "; - if ( value & BufferUsageFlagBits::eShaderDeviceAddressEXT ) result += "ShaderDeviceAddressEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } - enum class BufferViewCreateFlagBits - {}; + //=== VK_KHR_pipeline_executable_properties === - VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) - { - return "(void)"; - } + VkResult + vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } - using BufferViewCreateFlags = Flags; + VkResult + vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } - VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) - { - return "{}"; - } + VkResult vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( + device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } - enum class BuildAccelerationStructureFlagBitsNV - { - eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV, - eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV, - ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV, - ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV, - eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV - }; + //=== VK_NV_device_generated_commands === - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value ) - { - switch ( value ) + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT { - case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate"; - case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction"; - case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace"; - case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild"; - case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory"; - default: return "invalid"; + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); } - } - using BuildAccelerationStructureFlagsNV = Flags; - - VULKAN_HPP_INLINE BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) - { - return 
BuildAccelerationStructureFlagsNV( bit0 ) | bit1; - } + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } - VULKAN_HPP_INLINE BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) - { - return ~( BuildAccelerationStructureFlagsNV( bits ) ); - } + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } - template <> struct FlagTraits - { - enum + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory) - }; - }; + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } - VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value ) - { - if ( !value ) return "{}"; - std::string result; + VkResult + vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } - if ( value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate ) result += "AllowUpdate | "; - if ( value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction ) result += "AllowCompaction | "; - if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace ) result += "PreferFastTrace | "; - if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild ) result += "PreferFastBuild | "; - if ( value & BuildAccelerationStructureFlagBitsNV::eLowMemory ) result += "LowMemory | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } - enum class ColorComponentFlagBits - { - eR = VK_COLOR_COMPONENT_R_BIT, - eG = VK_COLOR_COMPONENT_G_BIT, - eB = VK_COLOR_COMPONENT_B_BIT, - eA = VK_COLOR_COMPONENT_A_BIT - }; + //=== VK_EXT_acquire_drm_display === - VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value ) - { - switch ( value ) + VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, + int32_t drmFd, + VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { - case ColorComponentFlagBits::eR : return "R"; - case ColorComponentFlagBits::eG : return "G"; - case ColorComponentFlagBits::eB : return "B"; - case ColorComponentFlagBits::eA : return "A"; - default: return "invalid"; + return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); } - } 
- using ColorComponentFlags = Flags; + VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); + } - VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) - { - return ColorComponentFlags( bit0 ) | bit1; - } + //=== VK_EXT_private_data === - VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits ) - { - return ~( ColorComponentFlags( bits ) ); - } + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } - template <> struct FlagTraits - { - enum + void vkDestroyPrivateDataSlotEXT( VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA) - }; - }; + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } - VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) - { - if ( !value ) return "{}"; - std::string result; + VkResult vkSetPrivateDataEXT( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } - if ( value & ColorComponentFlagBits::eR ) result += "R | "; - if ( value & ColorComponentFlagBits::eG ) result += "G | "; - if ( value & ColorComponentFlagBits::eB ) result += "B | "; - if ( value & ColorComponentFlagBits::eA ) result += "A | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkGetPrivateDataEXT( VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } - enum class CommandBufferResetFlagBits - { - eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT - }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === - VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value ) - { - switch ( value ) + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT { - case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources"; - default: return "invalid"; + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); } - } - - using CommandBufferResetFlags = Flags; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) - { - return CommandBufferResetFlags( bit0 ) | bit1; - } + //=== VK_KHR_synchronization2 === - VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) - { - return ~( CommandBufferResetFlags( bits ) ); - } + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo * 
pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } - template <> struct FlagTraits - { - enum + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources) - }; - }; + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } - VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) - { - if ( !value ) return "{}"; - std::string result; + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } - if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, + const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } - enum class CommandBufferUsageFlagBits - { - eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, - eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, - eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT - }; + void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } - VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) - { - switch ( value ) + VkResult vkQueueSubmit2KHR( VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2 * pSubmits, + VkFence fence ) const VULKAN_HPP_NOEXCEPT { - case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit"; - case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue"; - case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse"; - default: return "invalid"; + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); } - } - using CommandBufferUsageFlags = Flags; + void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } - VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) - { - return CommandBufferUsageFlags( bit0 ) | bit1; - } + void vkGetQueueCheckpointData2NV( VkQueue queue, + uint32_t * pCheckpointDataCount, + VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } - VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) - { - return ~( CommandBufferUsageFlags( bits ) ); - } + //=== VK_NV_fragment_shading_rate_enums === - template <> struct FlagTraits - { - enum + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const + VULKAN_HPP_NOEXCEPT { - allFlags = 
VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) - { - if ( !value ) return "{}"; - std::string result; + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } - if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | "; - if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | "; - if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + //=== VK_KHR_copy_commands2 === - enum class CommandPoolCreateFlagBits - { - eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, - eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, - eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT - }; + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } - VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) - { - switch ( value ) + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, + const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT { - case CommandPoolCreateFlagBits::eTransient : return "Transient"; - case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer"; - case CommandPoolCreateFlagBits::eProtected : return "Protected"; - default: return "invalid"; + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); } - } - using CommandPoolCreateFlags = Flags; + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } - VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) - { - return CommandPoolCreateFlags( bit0 ) | bit1; - } + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } - VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) - { - return ~( CommandPoolCreateFlags( bits ) ); - } + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, + const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } - template <> struct FlagTraits - { - enum + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, + const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected) - }; - }; + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } - VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === - if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | "; - if ( value & 
CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | "; - if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } - enum class CommandPoolResetFlagBits - { - eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT - }; + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value ) - { - switch ( value ) +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT { - case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources"; - default: return "invalid"; + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); } - } - using CommandPoolResetFlags = Flags; + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) - { - return CommandPoolResetFlags( bit0 ) | bit1; - } + //=== VK_KHR_ray_tracing_pipeline === - VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) - { - return ~( CommandPoolResetFlags( bits ) ); - } + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + width, + height, + depth ); + } - template <> struct FlagTraits - { - enum + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources) - }; - }; + return ::vkCreateRayTracingPipelinesKHR( + device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } - VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) - { - if ( !value ) return "{}"; - std::string result; + VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + 
uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } - if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + device, pipeline, firstGroup, groupCount, dataSize, pData ); + } - enum class CommandPoolTrimFlagBits - {}; + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + indirectDeviceAddress ); + } - VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits ) - { - return "(void)"; - } + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } - using CommandPoolTrimFlags = Flags; + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } - using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + //=== VK_EXT_vertex_input_dynamic_state === - VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) - { - return "{}"; - } + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( commandBuffer, + vertexBindingDescriptionCount, + pVertexBindingDescriptions, + vertexAttributeDescriptionCount, + pVertexAttributeDescriptions ); + } - enum class CompositeAlphaFlagBitsKHR - { - eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, - ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, - ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, - eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR - }; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === - VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) - { - switch ( value ) + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT { - case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque"; - case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied"; - case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied"; - 
case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit"; - default: return "invalid"; + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); } - } - using CompositeAlphaFlagsKHR = Flags; + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( + device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) - { - return CompositeAlphaFlagsKHR( bit0 ) | bit1; - } +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === - VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) - { - return ~( CompositeAlphaFlagsKHR( bits ) ); - } + VkResult vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } - template <> struct FlagTraits - { - enum + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit) - }; - }; + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) - { - if ( !value ) return "{}"; - std::string result; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === - if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | "; - if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | "; - if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | "; - if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); + } - enum class ConditionalRenderingFlagBitsEXT - { - eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT - }; + VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); + } - VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) - { - switch ( value ) + VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA * 
pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT { - case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted"; - default: return "invalid"; + return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); } - } - using ConditionalRenderingFlagsEXT = Flags; + void vkDestroyBufferCollectionFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); + } - VULKAN_HPP_INLINE ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) - { - return ConditionalRenderingFlagsEXT( bit0 ) | bit1; - } + VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA * pProperties ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - VULKAN_HPP_INLINE ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) - { - return ~( ConditionalRenderingFlagsEXT( bits ) ); - } + //=== VK_HUAWEI_subpass_shading === - template <> struct FlagTraits - { - enum + VkResult vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, + VkRenderPass renderpass, + VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) - { - if ( !value ) return "{}"; - std::string result; + return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); + } - if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); + } - enum class CullModeFlagBits - { - eNone = VK_CULL_MODE_NONE, - eFront = VK_CULL_MODE_FRONT_BIT, - eBack = VK_CULL_MODE_BACK_BIT, - eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK - }; + //=== VK_HUAWEI_invocation_mask === - VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value ) - { - switch ( value ) + void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { - case CullModeFlagBits::eNone : return "None"; - case CullModeFlagBits::eFront : return "Front"; - case CullModeFlagBits::eBack : return "Back"; - case CullModeFlagBits::eFrontAndBack : return "FrontAndBack"; - default: return "invalid"; + return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); } - } - using CullModeFlags = Flags; + //=== VK_NV_external_memory_rdma === - VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) - { - return CullModeFlags( bit0 ) | bit1; - } + VkResult vkGetMemoryRemoteAddressNV( VkDevice device, + const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); + } - VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits ) - { - return ~( CullModeFlags( bits ) ); - } + //=== VK_EXT_extended_dynamic_state2 === - 
template <> struct FlagTraits - { - enum + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, + uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & CullModeFlagBits::eFront ) result += "Front | "; - if ( value & CullModeFlagBits::eBack ) result += "Back | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class DebugReportFlagBitsEXT - { - eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, - eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, - ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, - eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, - eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT - }; + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } - VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value ) - { - switch ( value ) + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT { - case DebugReportFlagBitsEXT::eInformation : return "Information"; - case DebugReportFlagBitsEXT::eWarning : return "Warning"; - case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning"; - case DebugReportFlagBitsEXT::eError : return "Error"; - case DebugReportFlagBitsEXT::eDebug : return "Debug"; - default: return "invalid"; + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); } - } - using DebugReportFlagsEXT = Flags; - - VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) - { - return DebugReportFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) - { - return ~( DebugReportFlagsEXT( bits ) ); - } + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } - template <> struct FlagTraits - { - enum + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug) - }; - }; + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } - VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) - { - if ( !value ) return "{}"; - std::string result; + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } - if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | "; - if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | "; - if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | "; - if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | "; - if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } +# if defined( 
VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === - enum class DebugUtilsMessageSeverityFlagBitsEXT - { - eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, - eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, - eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, - eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT - }; + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) - { - switch ( value ) + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT { - case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose"; - case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info"; - case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning"; - case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error"; - default: return "invalid"; + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); } - } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - using DebugUtilsMessageSeverityFlagsEXT = Flags; + //=== VK_EXT_color_write_enable === - VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; - } + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } - VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) - { - return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); - } + //=== VK_EXT_multi_draw === - template <> struct FlagTraits - { - enum + void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) - { - if ( !value ) return "{}"; - std::string result; + return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); + } - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | "; - if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, 
+                                   uint32_t                          stride,
+                                   const int32_t *                   pVertexOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMultiIndexedEXT(
+        commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset );
+    }
-  enum class DebugUtilsMessageTypeFlagBitsEXT
-  {
-    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
-    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
-    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
-  };
+    //=== VK_EXT_pageable_device_local_memory ===
-  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
-  {
-    switch ( value )
+    void
+      vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT
     {
-      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General";
-      case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation";
-      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance";
-      default: return "invalid";
+      return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority );
     }
-  }
-  using DebugUtilsMessageTypeFlagsEXT = Flags;
+    //=== VK_KHR_maintenance4 ===
-  VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 )
-  {
-    return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
-  }
+    void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device,
+                                                 const VkDeviceBufferMemoryRequirements * pInfo,
+                                                 VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
-  VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits )
-  {
-    return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
-  }
+    void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device,
+                                                const VkDeviceImageMemoryRequirements * pInfo,
+                                                VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
-  template <> struct FlagTraits
-  {
-    enum
+    void vkGetDeviceImageSparseMemoryRequirementsKHR(
+      VkDevice device,
+      const VkDeviceImageMemoryRequirements * pInfo,
+      uint32_t * pSparseMemoryRequirementCount,
+      VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
-    };
+      return ::vkGetDeviceImageSparseMemoryRequirementsKHR(
+        device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
   };
+#endif
-  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
-  {
-    if ( !value ) return "{}";
-    std::string result;
-
-    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | ";
-    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | ";
-    if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
+  class DispatchLoaderDynamic;
+#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC )
+#  if defined( VK_NO_PROTOTYPES )
+#    define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+#  else
+#    define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+#  endif
+#endif
-  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT
-  {};
+#if !defined( VULKAN_HPP_STORAGE_API )
+#  if defined( VULKAN_HPP_STORAGE_SHARED )
+#    if defined( _MSC_VER )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+#      else
+#        define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+#      endif
+#    elif defined( __clang__ ) || defined( __GNUC__ )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
+#      else
+#        define VULKAN_HPP_STORAGE_API
+#      endif
+#    else
+#      define VULKAN_HPP_STORAGE_API
+#      pragma warning Unknown import / export semantics
+#    endif
+#  else
+#    define VULKAN_HPP_STORAGE_API
+#  endif
+#endif
-  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \
+      namespace VULKAN_HPP_NAMESPACE \
+      { \
+        VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \
+      }
+  extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+#  else
+  static inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic()
   {
-    return "(void)";
+    static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls;
+    return dls;
   }
+#  define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic()
+#  define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+#  endif
+#endif
-  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags;
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
+#  else
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
+#  endif
+#endif
-  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
-  {
-    return "{}";
-  }
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {}
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
-  enum class DebugUtilsMessengerCreateFlagBitsEXT
-  {};
+  struct AllocationCallbacks;
-  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
+  template
+  class ObjectDestroy
   {
-    return "(void)";
-  }
-
-  using DebugUtilsMessengerCreateFlagsEXT = Flags;
+  public:
+    ObjectDestroy() = default;
-  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
-  {
-    return "{}";
-  }
+    ObjectDestroy( OwnerType owner,
+                   Optional allocationCallbacks
+                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                   Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
-  enum class DependencyFlagBits
-  {
-    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
-    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
-    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
-    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR,
-    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR
-  };
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
+    Optional getAllocator() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_allocationCallbacks;
+    }
-  VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
-  {
-    switch ( value )
+  protected:
+    template
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
     {
-      case DependencyFlagBits::eByRegion : return "ByRegion";
-      case DependencyFlagBits::eDeviceGroup : return "DeviceGroup";
-      case DependencyFlagBits::eViewLocal : return "ViewLocal";
-      default: return "invalid";
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
     }
-  }
-  using DependencyFlags = Flags;
+  private:
+    OwnerType m_owner = {};
+    Optional m_allocationCallbacks = nullptr;
+    Dispatch const * m_dispatch = nullptr;
+  };
+
+  class NoParent;
-  VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
+  template
+  class ObjectDestroy
   {
-    return DependencyFlags( bit0 ) | bit1;
-  }
+  public:
+    ObjectDestroy() = default;
-  VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
-  {
-    return ~( DependencyFlags( bits ) );
-  }
+    ObjectDestroy( Optional allocationCallbacks,
+                   Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
-  template <> struct FlagTraits
-  {
-    enum
+    Optional getAllocator() const VULKAN_HPP_NOEXCEPT
     {
-      allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
-    };
-  };
-
-  VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
-  {
-    if ( !value ) return "{}";
-    std::string result;
+      return m_allocationCallbacks;
+    }
-    if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | ";
-    if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | ";
-    if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | ";
-    return "{ " + result.substr(0, result.size() - 3) + " }";
-  }
+  protected:
+    template
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_dispatch );
+      t.destroy( m_allocationCallbacks, *m_dispatch );
+    }
-  enum class DescriptorBindingFlagBitsEXT
-  {
-    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
-    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT,
-    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT,
-    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
+  private:
+    Optional m_allocationCallbacks = nullptr;
+    Dispatch const * m_dispatch = nullptr;
   };
-  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBitsEXT value )
+  template
+  class ObjectFree
   {
-    switch ( value )
-    {
-      case DescriptorBindingFlagBitsEXT::eUpdateAfterBind : return "UpdateAfterBind";
-      case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending";
-      case DescriptorBindingFlagBitsEXT::ePartiallyBound : return "PartiallyBound";
-      case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount : return "VariableDescriptorCount";
-      default: return "invalid";
-    }
-  }
+  public:
+    ObjectFree() = default;
-  using DescriptorBindingFlagsEXT = Flags;
+    ObjectFree( OwnerType owner,
+                Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
-  VULKAN_HPP_INLINE DescriptorBindingFlagsEXT
operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) - { - return DescriptorBindingFlagsEXT( bit0 ) | bit1; - } + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } - VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits ) - { - return ~( DescriptorBindingFlagsEXT( bits ) ); - } + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } - template <> struct FlagTraits - { - enum + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) - }; + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.free( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; }; - VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagsEXT value ) + template + class ObjectRelease { - if ( !value ) return "{}"; - std::string result; + public: + ObjectRelease() = default; - if ( value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind ) result += "UpdateAfterBind | "; - if ( value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | "; - if ( value & DescriptorBindingFlagBitsEXT::ePartiallyBound ) result += "PartiallyBound | "; - if ( value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount ) result += "VariableDescriptorCount | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + ObjectRelease( OwnerType owner, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_dispatch( &dispatch ) + {} - enum class DescriptorPoolCreateFlagBits - { - eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, - eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT - }; + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) - { - switch ( value ) + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT { - case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet"; - case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT : return "UpdateAfterBindEXT"; - default: return "invalid"; + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.release( t, *m_dispatch ); } - } - using DescriptorPoolCreateFlags = Flags; + private: + OwnerType m_owner = {}; + Dispatch const * m_dispatch = nullptr; + }; - VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) + template + class PoolFree { - return DescriptorPoolCreateFlags( bit0 ) | bit1; - } + public: + PoolFree() = default; - VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) - { - return ~( DescriptorPoolCreateFlags( bits ) ); - } + PoolFree( OwnerType owner, + PoolType pool, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_pool( pool ) + , m_dispatch( &dispatch ) + {} - template <> struct FlagTraits - { - enum + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { - allFlags = 
VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) - }; - }; + return m_owner; + } + PoolType getPool() const VULKAN_HPP_NOEXCEPT + { + return m_pool; + } - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + m_owner.free( m_pool, t, *m_dispatch ); + } - if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | "; - if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT ) result += "UpdateAfterBindEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + private: + OwnerType m_owner = OwnerType(); + PoolType m_pool = PoolType(); + Dispatch const * m_dispatch = nullptr; + }; - enum class DescriptorPoolResetFlagBits - {}; + //================== + //=== BASE TYPEs === + //================== - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) - { - return "(void)"; - } + using Bool32 = uint32_t; + using DeviceAddress = uint64_t; + using DeviceSize = uint64_t; + using RemoteAddressNV = void *; + using SampleMask = uint32_t; - using DescriptorPoolResetFlags = Flags; +} // namespace VULKAN_HPP_NAMESPACE - VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) - { - return "{}"; - } +#include - enum class DescriptorSetLayoutCreateFlagBits - { - ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, - eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT - }; +#ifndef VULKAN_HPP_NO_EXCEPTIONS +namespace std +{ + template <> + struct is_error_code_enum : public true_type + {}; +} // namespace std +#endif - VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) +namespace VULKAN_HPP_NAMESPACE +{ +#ifndef VULKAN_HPP_NO_EXCEPTIONS + + class ErrorCategoryImpl : public std::error_category { - switch ( value ) + public: + virtual const char * name() const VULKAN_HPP_NOEXCEPT override { - case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR"; - case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT : return "UpdateAfterBindPoolEXT"; - default: return "invalid"; + return VULKAN_HPP_NAMESPACE_STRING "::Result"; } - } - - using DescriptorSetLayoutCreateFlags = Flags; + virtual std::string message( int ev ) const override + { + return to_string( static_cast( ev ) ); + } + }; - VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) + class Error { - return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; - } + public: + Error() VULKAN_HPP_NOEXCEPT = default; + Error( const Error & ) VULKAN_HPP_NOEXCEPT = default; + virtual ~Error() VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) - { - return ~( DescriptorSetLayoutCreateFlags( bits ) ); - } + virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0; + }; - template <> struct FlagTraits + class LogicError + : public Error + , public std::logic_error { - enum + public: + explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {} + explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {} + + virtual const char * what() const VULKAN_HPP_NOEXCEPT { - allFlags = 
VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) - }; + return std::logic_error::what(); + } }; - VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + class SystemError + : public Error + , public std::system_error { - if ( !value ) return "{}"; - std::string result; - - if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | "; - if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT ) result += "UpdateAfterBindPoolEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + public: + SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {} + SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {} + SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {} + SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {} + SystemError( int ev, std::error_category const & ecat, std::string const & what ) + : Error(), std::system_error( ev, ecat, what ) + {} + SystemError( int ev, std::error_category const & ecat, char const * what ) + : Error(), std::system_error( ev, ecat, what ) + {} - enum class DescriptorUpdateTemplateCreateFlagBits - {}; + virtual const char * what() const VULKAN_HPP_NOEXCEPT + { + return std::system_error::what(); + } + }; - VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) + VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT { - return "(void)"; + static ErrorCategoryImpl instance; + return instance; } - using DescriptorUpdateTemplateCreateFlags = Flags; - - using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; - - VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) + VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT { - return "{}"; + return std::error_code( static_cast( e ), errorCategory() ); } - enum class DeviceCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits ) + VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT { - return "(void)"; + return std::error_condition( static_cast( e ), errorCategory() ); } - using DeviceCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) + class OutOfHostMemoryError : public SystemError { - return "{}"; - } + public: + OutOfHostMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} + OutOfHostMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) + {} + }; - enum class DeviceGroupPresentModeFlagBitsKHR + class OutOfDeviceMemoryError : public SystemError { - eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, - eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, - eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, - eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + public: + OutOfDeviceMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} + OutOfDeviceMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( 
DeviceGroupPresentModeFlagBitsKHR value ) + class InitializationFailedError : public SystemError { - switch ( value ) - { - case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local"; - case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote"; - case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum"; - case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice"; - default: return "invalid"; - } - } - - using DeviceGroupPresentModeFlagsKHR = Flags; - - VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) - { - return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; - } + public: + InitializationFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} + InitializationFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) + {} + }; - VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) + class DeviceLostError : public SystemError { - return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); - } + public: + DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) + {} + DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + }; - template <> struct FlagTraits + class MemoryMapFailedError : public SystemError { - enum - { - allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) - }; + public: + MemoryMapFailedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} + MemoryMapFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) + class LayerNotPresentError : public SystemError { - if ( !value ) return "{}"; - std::string result; - - if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | "; - if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | "; - if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | "; - if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + public: + LayerNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} + LayerNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) + {} + }; - enum class DeviceQueueCreateFlagBits + class ExtensionNotPresentError : public SystemError { - eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + public: + ExtensionNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} + ExtensionNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) + class FeatureNotPresentError : public SystemError { - switch ( value ) - { - 
case DeviceQueueCreateFlagBits::eProtected : return "Protected"; - default: return "invalid"; - } - } - - using DeviceQueueCreateFlags = Flags; + public: + FeatureNotPresentError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} + FeatureNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) + {} + }; - VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) + class IncompatibleDriverError : public SystemError { - return DeviceQueueCreateFlags( bit0 ) | bit1; - } + public: + IncompatibleDriverError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} + IncompatibleDriverError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) + {} + }; - VULKAN_HPP_INLINE DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) + class TooManyObjectsError : public SystemError { - return ~( DeviceQueueCreateFlags( bits ) ); - } + public: + TooManyObjectsError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} + TooManyObjectsError( char const * message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) + {} + }; - template <> struct FlagTraits + class FormatNotSupportedError : public SystemError { - enum - { - allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected) - }; + public: + FormatNotSupportedError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} + FormatNotSupportedError( char const * message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) + class FragmentedPoolError : public SystemError { - if ( !value ) return "{}"; - std::string result; - - if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class DisplayModeCreateFlagBitsKHR - {}; + public: + FragmentedPoolError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} + FragmentedPoolError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) + {} + }; - VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR ) + class UnknownError : public SystemError { - return "(void)"; - } - - using DisplayModeCreateFlagsKHR = Flags; + public: + UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + }; - VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) + class OutOfPoolMemoryError : public SystemError { - return "{}"; - } + public: + OutOfPoolMemoryError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} + OutOfPoolMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) + {} + }; - enum class DisplayPlaneAlphaFlagBitsKHR + class InvalidExternalHandleError : public SystemError { - eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, - eGlobal = 
VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, - ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, - ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + public: + InvalidExternalHandleError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} + InvalidExternalHandleError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) + class FragmentationError : public SystemError { - switch ( value ) - { - case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque"; - case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global"; - case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel"; - case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied"; - default: return "invalid"; - } - } - - using DisplayPlaneAlphaFlagsKHR = Flags; + public: + FragmentationError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} + FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) + {} + }; - VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) + class InvalidOpaqueCaptureAddressError : public SystemError { - return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; - } + public: + InvalidOpaqueCaptureAddressError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} + InvalidOpaqueCaptureAddressError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) + {} + }; - VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + class SurfaceLostKHRError : public SystemError { - return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); - } + public: + SurfaceLostKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} + SurfaceLostKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) + {} + }; - template <> struct FlagTraits + class NativeWindowInUseKHRError : public SystemError { - enum - { - allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) - }; + public: + NativeWindowInUseKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} + NativeWindowInUseKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + class OutOfDateKHRError : public SystemError { - if ( !value ) return "{}"; - std::string result; - - if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | "; - if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | "; - if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | "; - if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class 
DisplaySurfaceCreateFlagBitsKHR - {}; + public: + OutOfDateKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) + {} + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; - VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) + class IncompatibleDisplayKHRError : public SystemError { - return "(void)"; - } - - using DisplaySurfaceCreateFlagsKHR = Flags; + public: + IncompatibleDisplayKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} + IncompatibleDisplayKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) + {} + }; - VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) + class ValidationFailedEXTError : public SystemError { - return "{}"; - } - - enum class EventCreateFlagBits - {}; + public: + ValidationFailedEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} + ValidationFailedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) + {} + }; - VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits ) + class InvalidShaderNVError : public SystemError { - return "(void)"; - } - - using EventCreateFlags = Flags; + public: + InvalidShaderNVError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} + InvalidShaderNVError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) + {} + }; - VULKAN_HPP_INLINE std::string to_string( EventCreateFlags ) + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError { - return "{}"; - } + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + {} + }; - enum class ExternalFenceFeatureFlagBits + class NotPermittedKHRError : public SystemError { - eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, - eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, - eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR, - eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR + public: + NotPermittedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) + {} + NotPermittedKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) + {} }; - VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value ) +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + class FullScreenExclusiveModeLostEXTError : public SystemError { - switch ( value ) - { - case ExternalFenceFeatureFlagBits::eExportable : return "Exportable"; - case ExternalFenceFeatureFlagBits::eImportable : return "Importable"; - default: return "invalid"; - } - } + public: + FullScreenExclusiveModeLostEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} + FullScreenExclusiveModeLostEXTError( char const * message ) + : SystemError( 
make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + {} + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + namespace + { + [[noreturn]] void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); + case Result::eErrorDeviceLost: throw DeviceLostError( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); + case Result::eErrorTooManyObjects: throw TooManyObjectsError( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); + case Result::eErrorUnknown: throw UnknownError( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); + case Result::eErrorFragmentation: throw FragmentationError( message ); + case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); + case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: + throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); + case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + default: throw SystemError( make_error_code( result ) ); + } + } + } // namespace +#endif - using ExternalFenceFeatureFlags = Flags; + template + void ignore( T const & ) VULKAN_HPP_NOEXCEPT + {} - VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) + template + struct ResultValue { - return ExternalFenceFeatureFlags( bit0 ) | bit1; - } +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) ) +#else + ResultValue( Result r, T & v ) +#endif + : result( r ), value( v ) + {} - VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) - { - return ~( ExternalFenceFeatureFlags( bits ) ); - } +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) ) +#else + ResultValue( Result r, T && v ) +#endif + : result( r ), value( std::move( v ) ) + {} - template <> struct FlagTraits - { - enum + Result result; + T value; + + operator 
std::tuple() VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable) - }; - }; + return std::tuple( result, value ); + } - using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; +#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T const &() const & VULKAN_HPP_NOEXCEPT + { + return value; + } - VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) - { - if ( !value ) return "{}"; - std::string result; + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T &() & VULKAN_HPP_NOEXCEPT + { + return value; + } - if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | "; - if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T const &&() const && VULKAN_HPP_NOEXCEPT + { + return std::move( value ); + } - enum class ExternalFenceHandleTypeFlagBits - { - eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, - eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, - eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, - eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, - eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, - eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR, - eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, - eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator T &&() && VULKAN_HPP_NOEXCEPT + { + return std::move( value ); + } +#endif }; - VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value ) +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template + struct ResultValue> { - switch ( value ) +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, UniqueHandle && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, UniqueHandle && v ) +# endif + : result( r ) + , value( std::move( v ) ) + {} + + std::tuple> asTuple() { - case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; - case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd"; - default: return "invalid"; + return std::make_tuple( result, std::move( value ) ); } - } - - using ExternalFenceHandleTypeFlags = Flags; - - VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) - { - return ExternalFenceHandleTypeFlags( bit0 ) | bit1; - } - VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) - { - return ~( ExternalFenceHandleTypeFlags( bits ) ); - } +# if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. 
Explicitly access the value as member of ResultValue." ) + operator UniqueHandle &() & VULKAN_HPP_NOEXCEPT + { + return value; + } - template <> struct FlagTraits - { - enum + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." ) + operator UniqueHandle() VULKAN_HPP_NOEXCEPT { - allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd) - }; - }; + return std::move( value ); + } +# endif - using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + Result result; + UniqueHandle value; + }; - VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) + template + struct ResultValue>> { - if ( !value ) return "{}"; - std::string result; +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, std::vector> && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, std::vector> && v ) +# endif + : result( r ) + , value( std::move( v ) ) + {} - if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | "; - if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + std::tuple>> asTuple() + { + return std::make_tuple( result, std::move( value ) ); + } - enum class ExternalMemoryFeatureFlagBits - { - eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, - eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, - eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, - eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR, - eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR, - eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR - }; + Result result; + std::vector> value; - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value ) - { - switch ( value ) +# if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST ) + VULKAN_HPP_DEPRECATED( + "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." 
) + operator std::tuple> &>() VULKAN_HPP_NOEXCEPT { - case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly"; - case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable"; - case ExternalMemoryFeatureFlagBits::eImportable : return "Importable"; - default: return "invalid"; + return std::tuple> &>( result, value ); } - } - - using ExternalMemoryFeatureFlags = Flags; +# endif + }; +#endif - VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) + template + struct ResultValueType { - return ExternalMemoryFeatureFlags( bit0 ) | bit1; - } +#ifdef VULKAN_HPP_NO_EXCEPTIONS + typedef ResultValue type; +#else + typedef T type; +#endif + }; - VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) + template <> + struct ResultValueType { - return ~( ExternalMemoryFeatureFlags( bits ) ); - } - - template <> struct FlagTraits - { - enum - { - allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable) - }; +#ifdef VULKAN_HPP_NO_EXCEPTIONS + typedef Result type; +#else + typedef void type; +#endif }; - using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) + VULKAN_HPP_INLINE ResultValueType::type createResultValue( Result result, char const * message ) { - if ( !value ) return "{}"; - std::string result; - - if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | "; - if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | "; - if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; +#ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return result; +#else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } +#endif } - enum class ExternalMemoryFeatureFlagBitsNV - { - eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, - eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, - eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV - }; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value ) + template + VULKAN_HPP_INLINE typename ResultValueType::type createResultValue( Result result, T & data, char const * message ) { - switch ( value ) +#ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return ResultValue( result, std::move( data ) ); +#else + if ( result != Result::eSuccess ) { - case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly"; - case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable"; - case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable"; - default: return "invalid"; + throwResultException( result, message ); } + return std::move( data ); +#endif } - using ExternalMemoryFeatureFlagsNV = Flags; - - VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) + VULKAN_HPP_INLINE Result createResultValue( Result result, + char const * message, + std::initializer_list successCodes ) { - return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; +#ifdef 
VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +#endif + return result; } - VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + template + VULKAN_HPP_INLINE ResultValue + createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) { - return ~( ExternalMemoryFeatureFlagsNV( bits ) ); +#ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +#endif + return ResultValue( result, std::move( data ) ); } - template <> struct FlagTraits +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( + Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const & deleter ) { - enum +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return ResultValue>( result, UniqueHandle( data, deleter ) ); +# else + if ( result != Result::eSuccess ) { - allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | "; - if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | "; - if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; + throwResultException( result, message ); + } + return UniqueHandle( data, deleter ); +# endif } - enum class ExternalMemoryHandleTypeFlagBits - { - eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, - eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, - eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, - eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, - eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, - eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, - eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, - eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, - eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, - eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, - eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, - eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, - eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR, - eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, - eD3D11TextureKHR = 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR, - eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR, - eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR, - eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value ) - { - switch ( value ) - { - case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; - case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture"; - case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt"; - case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap"; - case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource"; - case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT"; - case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID"; - case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT"; - case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT"; - default: return "invalid"; + template + VULKAN_HPP_INLINE ResultValue> + createResultValue( Result result, + T & data, + char const * message, + std::initializer_list successCodes, + typename UniqueHandleTraits::deleter const & deleter ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +# else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); } +# endif + return ResultValue>( result, UniqueHandle( data, deleter ) ); } - using ExternalMemoryHandleTypeFlags = Flags; - - VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) + template + VULKAN_HPP_INLINE typename ResultValueType>>::type + createResultValue( Result result, std::vector> && data, char const * message ) { - return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); + return ResultValue>>( result, std::move( data ) ); +# else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } + return std::move( data ); +# endif } - VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) - { - return ~( ExternalMemoryHandleTypeFlags( bits ) ); + template + VULKAN_HPP_INLINE ResultValue>> + createResultValue( Result result, + std::vector> && data, + char const * message, + std::initializer_list successCodes ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + ignore( message ); + ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +# else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +# endif + return ResultValue>>( result, std::move( data ) ); } 
+#endif +} // namespace VULKAN_HPP_NAMESPACE + +// clang-format off +#include +#include +#include +// clang-format on - template <> struct FlagTraits +namespace VULKAN_HPP_NAMESPACE +{ + //======================= + //=== STRUCTS EXTENDS === + //======================= + + //=== VK_VERSION_1_1 === + template <> + struct StructExtends { enum { - allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) + value = true }; }; - - using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | "; - if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ExternalMemoryHandleTypeFlagBitsNV + template <> + struct StructExtends { - eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, - eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, - eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, - eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image"; - case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt"; - default: return "invalid"; - } - } - - using ExternalMemoryHandleTypeFlagsNV = Flags; - - VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 
) - { - return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) - { - return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | "; - if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ExternalSemaphoreFeatureFlagBits + template <> + struct StructExtends { - eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, - eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, - eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR, - eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable"; - case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable"; - default: return "invalid"; - } - } - - using ExternalSemaphoreFeatureFlags = Flags; - - VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) - { - return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) - { - return ~( ExternalSemaphoreFeatureFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable) + value = true }; }; - - using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; - - VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | "; - if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ExternalSemaphoreHandleTypeFlagBits + template <> + struct StructExtends { - eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, - eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, - eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, - eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, - eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, - eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, 
- eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR, - eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, - eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR, - eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; - case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; - case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; - case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence"; - case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd"; - default: return "invalid"; - } - } - - using ExternalSemaphoreHandleTypeFlags = Flags; - - VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd) + value = true }; }; - - using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; - - VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | "; - if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class FenceCreateFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eSignaled = VK_FENCE_CREATE_SIGNALED_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case FenceCreateFlagBits::eSignaled : return "Signaled"; - default: return "invalid"; - } - } - - using FenceCreateFlags = Flags; - - VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return FenceCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - 
return ~( FenceCreateFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(FenceCreateFlagBits::eSignaled) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class FenceImportFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT, - eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case FenceImportFlagBits::eTemporary : return "Temporary"; - default: return "invalid"; - } - } - - using FenceImportFlags = Flags; - - VULKAN_HPP_INLINE FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return FenceImportFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE FenceImportFlags operator~( FenceImportFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( FenceImportFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(FenceImportFlagBits::eTemporary) + value = true }; }; - - using FenceImportFlagsKHR = FenceImportFlags; - - VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class FormatFeatureFlagBits - { - eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, - eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, - eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, - eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, - eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, - eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, - eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, - eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, - eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, - eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, - eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, - eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, - eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, - eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, - eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, - eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, - eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, - eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, - eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = 
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, - eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, - eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, - eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, - eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, - eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, - eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, - eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, - eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, - eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, - eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, - eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, - eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, - eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, - eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, - eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value ) - { - switch ( value ) - { - case FormatFeatureFlagBits::eSampledImage : return "SampledImage"; - case FormatFeatureFlagBits::eStorageImage : return "StorageImage"; - case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic"; - case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; - case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; - case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic"; - case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer"; - case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment"; - case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend"; - case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment"; - case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc"; - case FormatFeatureFlagBits::eBlitDst : return "BlitDst"; - case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear"; - case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc"; - case FormatFeatureFlagBits::eTransferDst : return "TransferDst"; - case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit"; - case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; - case FormatFeatureFlagBits::eDisjoint : return "Disjoint"; - case FormatFeatureFlagBits::eCositedChromaSamples : return 
"CositedChromaSamples"; - case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG"; - case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT : return "SampledImageFilterMinmaxEXT"; - case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; - default: return "invalid"; - } - } - - using FormatFeatureFlags = Flags; - - VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) - { - return FormatFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) - { - return ~( FormatFeatureFlags( bits ) ); - } - - template <> struct FlagTraits - { - enum - { - allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | "; - if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | "; - if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | "; - if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | "; - if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | "; - if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | "; - if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | "; - if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | "; - if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | "; - if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; - if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | "; - if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | "; - if ( value & 
FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; - if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; - if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | "; - if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT ) result += "SampledImageFilterMinmaxEXT | "; - if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class FramebufferCreateFlagBits - { - eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case FramebufferCreateFlagBits::eImagelessKHR : return "ImagelessKHR"; - default: return "invalid"; - } - } - - using FramebufferCreateFlags = Flags; - - VULKAN_HPP_INLINE FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) - { - return FramebufferCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) - { - return ~( FramebufferCreateFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(FramebufferCreateFlagBits::eImagelessKHR) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & FramebufferCreateFlagBits::eImagelessKHR ) result += "ImagelessKHR | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class GeometryFlagBitsNV + template <> + struct StructExtends { - eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV, - eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case GeometryFlagBitsNV::eOpaque : return "Opaque"; - case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; - default: return "invalid"; - } - } - - using GeometryFlagsNV = Flags; - - VULKAN_HPP_INLINE GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) - { - return GeometryFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) - { - return ~( GeometryFlagsNV( bits 
) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & GeometryFlagBitsNV::eOpaque ) result += "Opaque | "; - if ( value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class GeometryInstanceFlagBitsNV + template <> + struct StructExtends { - eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, - eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV, - eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV, - eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable"; - case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; - case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque"; - case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque"; - default: return "invalid"; - } - } - - using GeometryInstanceFlagsNV = Flags; - - VULKAN_HPP_INLINE GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) - { - return GeometryInstanceFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) - { - return ~( GeometryInstanceFlagsNV( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & GeometryInstanceFlagBitsNV::eTriangleCullDisable ) result += "TriangleCullDisable | "; - if ( value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | "; - if ( value & GeometryInstanceFlagBitsNV::eForceOpaque ) result += "ForceOpaque | "; - if ( value & GeometryInstanceFlagBitsNV::eForceNoOpaque ) result += "ForceNoOpaque | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class HeadlessSurfaceCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) - { - return "(void)"; - } - - using HeadlessSurfaceCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) - { - return "{}"; - } - -#ifdef VK_USE_PLATFORM_IOS_MVK - enum class IOSSurfaceCreateFlagBitsMVK - {}; - - VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) - { - return "(void)"; - } - - using IOSSurfaceCreateFlagsMVK = Flags; - - VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) - { - return "{}"; - } -#endif 
/*VK_USE_PLATFORM_IOS_MVK*/ - - enum class ImageAspectFlagBits - { - eColor = VK_IMAGE_ASPECT_COLOR_BIT, - eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, - eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, - eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, - ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, - ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, - ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, - eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, - eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, - eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, - eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT, - ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, - ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, - ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) - { - switch ( value ) - { - case ImageAspectFlagBits::eColor : return "Color"; - case ImageAspectFlagBits::eDepth : return "Depth"; - case ImageAspectFlagBits::eStencil : return "Stencil"; - case ImageAspectFlagBits::eMetadata : return "Metadata"; - case ImageAspectFlagBits::ePlane0 : return "Plane0"; - case ImageAspectFlagBits::ePlane1 : return "Plane1"; - case ImageAspectFlagBits::ePlane2 : return "Plane2"; - case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT"; - case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT"; - case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT"; - case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT"; - default: return "invalid"; - } - } - - using ImageAspectFlags = Flags; - - VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) - { - return ImageAspectFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits ) - { - return ~( ImageAspectFlags( bits ) ); - } - - template <> struct FlagTraits + template <> + struct StructExtends { enum { - allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ImageAspectFlagBits::eColor ) result += "Color | "; - if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | "; - if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | "; - if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | "; - if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | "; - if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | "; - if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | "; - if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | "; - if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | "; - if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | "; - if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ImageCreateFlagBits - { - eSparseBinding = 
VK_IMAGE_CREATE_SPARSE_BINDING_BIT, - eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, - eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, - eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, - eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, - eAlias = VK_IMAGE_CREATE_ALIAS_BIT, - eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, - e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, - eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, - eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, - eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, - eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, - eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, - eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, - eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, - eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, - e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, - eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, - eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, - eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, - eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR - }; - - VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value ) - { - switch ( value ) - { - case ImageCreateFlagBits::eSparseBinding : return "SparseBinding"; - case ImageCreateFlagBits::eSparseResidency : return "SparseResidency"; - case ImageCreateFlagBits::eSparseAliased : return "SparseAliased"; - case ImageCreateFlagBits::eMutableFormat : return "MutableFormat"; - case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible"; - case ImageCreateFlagBits::eAlias : return "Alias"; - case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; - case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible"; - case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible"; - case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage"; - case ImageCreateFlagBits::eProtected : return "Protected"; - case ImageCreateFlagBits::eDisjoint : return "Disjoint"; - case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV"; - case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT"; - case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT"; - default: return "invalid"; - } - } - - using ImageCreateFlags = Flags; - - VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) - { - return ImageCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits ) - { - return ~( ImageCreateFlags( bits ) ); - } - - template <> struct FlagTraits + template <> + struct StructExtends { enum { - allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) 
| VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | "; - if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | "; - if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | "; - if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | "; - if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | "; - if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | "; - if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | "; - if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | "; - if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | "; - if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | "; - if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | "; - if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | "; - if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | "; - if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - -#ifdef VK_USE_PLATFORM_FUCHSIA - enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA - {}; - - VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) - { - return "(void)"; - } - - using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; - - VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) - { - return "{}"; - } -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - enum class ImageUsageFlagBits + template <> + struct StructExtends { - eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, - eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, - eStorage = VK_IMAGE_USAGE_STORAGE_BIT, - eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, - eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, - eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, - eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ImageUsageFlagBits::eTransferSrc : return "TransferSrc"; - case ImageUsageFlagBits::eTransferDst : return "TransferDst"; - case ImageUsageFlagBits::eSampled : return "Sampled"; - case ImageUsageFlagBits::eStorage : return "Storage"; - case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment"; - case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment"; - case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment"; - case ImageUsageFlagBits::eInputAttachment : return "InputAttachment"; - case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; - case ImageUsageFlagBits::eFragmentDensityMapEXT : return 
"FragmentDensityMapEXT"; - default: return "invalid"; - } - } - - using ImageUsageFlags = Flags; - - VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return ImageUsageFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( ImageUsageFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; - if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | "; - if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | "; - if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | "; - if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | "; - if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; - if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | "; - if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | "; - if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ImageViewCreateFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT"; - default: return "invalid"; - } - } - - using ImageViewCreateFlags = Flags; - - VULKAN_HPP_INLINE ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return ImageViewCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( ImageViewCreateFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + template <> + struct StructExtends { - if ( !value ) return 
"{}"; - std::string result; - - if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + enum + { + value = true + }; + }; - enum class IndirectCommandsLayoutUsageFlagBitsNVX + //=== VK_VERSION_1_2 === + template <> + struct StructExtends { - eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX, - eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX, - eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX, - eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions"; - case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences"; - default: return "invalid"; - } - } - - using IndirectCommandsLayoutUsageFlagsNVX = Flags; - - VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences ) result += "UnorderedSequences | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences ) result += "SparseSequences | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions ) result += "EmptyExecutions | "; - if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences ) result += "IndexedSequences | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class InstanceCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using InstanceCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - -#ifdef VK_USE_PLATFORM_MACOS_MVK - enum class MacOSSurfaceCreateFlagBitsMVK - {}; - - VULKAN_HPP_INLINE std::string to_string( 
MacOSSurfaceCreateFlagBitsMVK ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using MacOSSurfaceCreateFlagsMVK = Flags; - - VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - enum class MemoryAllocateFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, - eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask"; - default: return "invalid"; - } - } - - using MemoryAllocateFlags = Flags; - - VULKAN_HPP_INLINE MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return MemoryAllocateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( MemoryAllocateFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) + value = true }; }; - - using MemoryAllocateFlagsKHR = MemoryAllocateFlags; - - VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class MemoryHeapFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, - eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, - eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal"; - case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance"; - default: return "invalid"; - } - } - - using MemoryHeapFlags = Flags; - - VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return MemoryHeapFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( MemoryHeapFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | "; - if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | "; - return "{ " + 
result.substr(0, result.size() - 3) + " }"; - } - - enum class MemoryMapFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using MemoryMapFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class MemoryPropertyFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, - eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, - eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, - eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, - eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, - eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, - eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, - eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal"; - case MemoryPropertyFlagBits::eHostVisible : return "HostVisible"; - case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent"; - case MemoryPropertyFlagBits::eHostCached : return "HostCached"; - case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated"; - case MemoryPropertyFlagBits::eProtected : return "Protected"; - case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD"; - case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD"; - default: return "invalid"; - } - } - - using MemoryPropertyFlags = Flags; - - VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return MemoryPropertyFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( MemoryPropertyFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | "; - if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | "; - if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | "; - if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | "; - if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | "; - if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | "; - if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += 
"DeviceCoherentAMD | "; - if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - -#ifdef VK_USE_PLATFORM_METAL_EXT - enum class MetalSurfaceCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using MetalSurfaceCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - enum class ObjectEntryUsageFlagBitsNVX + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX, - eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics"; - case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute"; - default: return "invalid"; - } - } - - using ObjectEntryUsageFlagsNVX = Flags; - - VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return ObjectEntryUsageFlagsNVX( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( ObjectEntryUsageFlagsNVX( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & ObjectEntryUsageFlagBitsNVX::eGraphics ) result += "Graphics | "; - if ( value & ObjectEntryUsageFlagBitsNVX::eCompute ) result += "Compute | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PeerMemoryFeatureFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, - eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, - eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, - eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, - eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR, - eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR, - eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR, - eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc"; - case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst"; - case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc"; - case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst"; - default: return "invalid"; - } - } - - using PeerMemoryFeatureFlags = Flags; - - VULKAN_HPP_INLINE PeerMemoryFeatureFlags 
operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return PeerMemoryFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( PeerMemoryFeatureFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) + value = true }; }; - - using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; - - VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | "; - if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | "; - if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | "; - if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineCacheCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits ) - { - return "(void)"; - } - - using PipelineCacheCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineColorBlendStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineColorBlendStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineCompilerControlFlagBitsAMD - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineCompilerControlFlagsAMD = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineCoverageModulationStateCreateFlagBitsNV - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineCoverageModulationStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineCoverageReductionStateCreateFlagBitsNV - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineCoverageReductionStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( 
PipelineCoverageReductionStateCreateFlagsNV ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineCoverageToColorStateCreateFlagBitsNV - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineCoverageToColorStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineCreateFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, - eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, - eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, - eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, - eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE, - eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, - eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, - eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, - eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, - eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization"; - case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives"; - case PipelineCreateFlagBits::eDerivative : return "Derivative"; - case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex"; - case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase"; - case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV"; - case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR"; - case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR"; - default: return "invalid"; - } - } - - using PipelineCreateFlags = Flags; - - VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return PipelineCreateFlags( bit0 ) | bit1; - } + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + //=== VK_VERSION_1_3 === + template <> + struct StructExtends { - return ~( PipelineCreateFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + template <> + struct StructExtends { - if ( !value ) return 
"{}"; - std::string result; - - if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | "; - if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | "; - if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | "; - if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; - if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | "; - if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; - if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; - if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineCreationFeedbackFlagBitsEXT + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, - eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT, - eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid"; - case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit"; - case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration"; - default: return "invalid"; - } - } - - using PipelineCreationFeedbackFlagsEXT = Flags; - - VULKAN_HPP_INLINE PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( PipelineCreationFeedbackFlagsEXT( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | "; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | "; - if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineDepthStencilStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using 
PipelineDepthStencilStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineDiscardRectangleStateCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineDynamicStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineDynamicStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineInputAssemblyStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineInputAssemblyStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineLayoutCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using PipelineLayoutCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class PipelineMultisampleStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineMultisampleStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) - { - return "{}"; - } - - enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) - { - return "{}"; - } - - enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) - { - return "{}"; - } - - enum class PipelineRasterizationStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineRasterizationStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags 
) - { - return "{}"; - } - - enum class PipelineRasterizationStateStreamCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) - { - return "(void)"; - } - - using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) - { - return "{}"; - } - - enum class PipelineShaderStageCreateFlagBits - { - eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, - eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT"; - case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT"; - default: return "invalid"; - } - } - - using PipelineShaderStageCreateFlags = Flags; - - VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) - { - return PipelineShaderStageCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) - { - return ~( PipelineShaderStageCreateFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | "; - if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineStageFlagBits - { - eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, - eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, - eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, - eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, - eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, - eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, - eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, - eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, - eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, - eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, - eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, - eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, - eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, - eHost = VK_PIPELINE_STAGE_HOST_BIT, - eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, - eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, - eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, - eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, - eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, - eShadingRateImageNV = 
VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, - eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, - eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, - eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, - eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, - eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT - }; - - VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) - { - switch ( value ) - { - case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe"; - case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect"; - case PipelineStageFlagBits::eVertexInput : return "VertexInput"; - case PipelineStageFlagBits::eVertexShader : return "VertexShader"; - case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader"; - case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader"; - case PipelineStageFlagBits::eGeometryShader : return "GeometryShader"; - case PipelineStageFlagBits::eFragmentShader : return "FragmentShader"; - case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests"; - case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests"; - case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput"; - case PipelineStageFlagBits::eComputeShader : return "ComputeShader"; - case PipelineStageFlagBits::eTransfer : return "Transfer"; - case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe"; - case PipelineStageFlagBits::eHost : return "Host"; - case PipelineStageFlagBits::eAllGraphics : return "AllGraphics"; - case PipelineStageFlagBits::eAllCommands : return "AllCommands"; - case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT"; - case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; - case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX"; - case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; - case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV"; - case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV"; - case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV"; - case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV"; - case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT"; - default: return "invalid"; - } - } - - using PipelineStageFlags = Flags; - - VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) - { - return PipelineStageFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits ) - { - return ~( PipelineStageFlags( bits ) ); - } - - template <> struct FlagTraits - { - enum - { - allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | 
VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) - }; - }; - - VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | "; - if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | "; - if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | "; - if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | "; - if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | "; - if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | "; - if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | "; - if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | "; - if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | "; - if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | "; - if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | "; - if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | "; - if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | "; - if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | "; - if ( value & PipelineStageFlagBits::eHost ) result += "Host | "; - if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | "; - if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | "; - if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; - if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; - if ( value & PipelineStageFlagBits::eCommandProcessNVX ) result += "CommandProcessNVX | "; - if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; - if ( value & PipelineStageFlagBits::eRayTracingShaderNV ) result += "RayTracingShaderNV | "; - if ( value & PipelineStageFlagBits::eAccelerationStructureBuildNV ) result += "AccelerationStructureBuildNV | "; - if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | "; - if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | "; - if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class PipelineTessellationStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineTessellationStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string 
to_string( PipelineTessellationStateCreateFlags ) - { - return "{}"; - } - - enum class PipelineVertexInputStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineVertexInputStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) - { - return "{}"; - } - - enum class PipelineViewportStateCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) - { - return "(void)"; - } - - using PipelineViewportStateCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) - { - return "{}"; - } - - enum class PipelineViewportSwizzleStateCreateFlagBitsNV - {}; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) - { - return "(void)"; - } - - using PipelineViewportSwizzleStateCreateFlagsNV = Flags; - - VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) - { - return "{}"; - } - - enum class QueryControlFlagBits + template <> + struct StructExtends { - ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case QueryControlFlagBits::ePrecise : return "Precise"; - default: return "invalid"; - } - } - - using QueryControlFlags = Flags; - - VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) - { - return QueryControlFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits ) - { - return ~( QueryControlFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(QueryControlFlagBits::ePrecise) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class QueryPipelineStatisticFlagBits - { - eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, - eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, - eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, - eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, - eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, - eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, - eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, - eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, - eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, - eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, - eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT - }; - - VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value ) - { - switch ( value ) - { - case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices"; - case 
QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives"; - case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations"; - case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations"; - case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives"; - case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations"; - case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives"; - case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations"; - case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches"; - case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations"; - case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations"; - default: return "invalid"; - } - } - - using QueryPipelineStatisticFlags = Flags; - - VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) - { - return QueryPipelineStatisticFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) - { - return ~( QueryPipelineStatisticFlags( bits ) ); - } - - template <> struct FlagTraits + template <> + struct StructExtends { enum { - allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | "; - if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | "; - if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | "; - if ( value & 
QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | "; - if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class QueryPoolCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits ) - { - return "(void)"; - } - - using QueryPoolCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) - { - return "{}"; - } - - enum class QueryResultFlagBits + template <> + struct StructExtends { - e64 = VK_QUERY_RESULT_64_BIT, - eWait = VK_QUERY_RESULT_WAIT_BIT, - eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, - ePartial = VK_QUERY_RESULT_PARTIAL_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case QueryResultFlagBits::e64 : return "64"; - case QueryResultFlagBits::eWait : return "Wait"; - case QueryResultFlagBits::eWithAvailability : return "WithAvailability"; - case QueryResultFlagBits::ePartial : return "Partial"; - default: return "invalid"; - } - } - - using QueryResultFlags = Flags; - - VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) - { - return QueryResultFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits ) - { - return ~( QueryResultFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & QueryResultFlagBits::e64 ) result += "64 | "; - if ( value & QueryResultFlagBits::eWait ) result += "Wait | "; - if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | "; - if ( value & QueryResultFlagBits::ePartial ) result += "Partial | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class QueueFlagBits + template <> + struct StructExtends { - eGraphics = VK_QUEUE_GRAPHICS_BIT, - eCompute = VK_QUEUE_COMPUTE_BIT, - eTransfer = VK_QUEUE_TRANSFER_BIT, - eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, - eProtected = VK_QUEUE_PROTECTED_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case QueueFlagBits::eGraphics : return "Graphics"; - case QueueFlagBits::eCompute : return "Compute"; - case QueueFlagBits::eTransfer : return "Transfer"; - case QueueFlagBits::eSparseBinding : return "SparseBinding"; - case QueueFlagBits::eProtected : return "Protected"; - default: return "invalid"; - } - } - - using QueueFlags = Flags; - - VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) - { - return QueueFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits ) - { - return ~( QueueFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(QueueFlagBits::eGraphics) | 
VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & QueueFlagBits::eGraphics ) result += "Graphics | "; - if ( value & QueueFlagBits::eCompute ) result += "Compute | "; - if ( value & QueueFlagBits::eTransfer ) result += "Transfer | "; - if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | "; - if ( value & QueueFlagBits::eProtected ) result += "Protected | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class RenderPassCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits ) - { - return "(void)"; - } - - using RenderPassCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags ) - { - return "{}"; - } - - enum class ResolveModeFlagBitsKHR + template <> + struct StructExtends { - eNone = VK_RESOLVE_MODE_NONE_KHR, - eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR, - eAverage = VK_RESOLVE_MODE_AVERAGE_BIT_KHR, - eMin = VK_RESOLVE_MODE_MIN_BIT_KHR, - eMax = VK_RESOLVE_MODE_MAX_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBitsKHR value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case ResolveModeFlagBitsKHR::eNone : return "None"; - case ResolveModeFlagBitsKHR::eSampleZero : return "SampleZero"; - case ResolveModeFlagBitsKHR::eAverage : return "Average"; - case ResolveModeFlagBitsKHR::eMin : return "Min"; - case ResolveModeFlagBitsKHR::eMax : return "Max"; - default: return "invalid"; - } - } - - using ResolveModeFlagsKHR = Flags; - - VULKAN_HPP_INLINE ResolveModeFlagsKHR operator|( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) - { - return ResolveModeFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE ResolveModeFlagsKHR operator~( ResolveModeFlagBitsKHR bits ) + value = true + }; + }; + template <> + struct StructExtends { - return ~( ResolveModeFlagsKHR( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(ResolveModeFlagBitsKHR::eNone) | VkFlags(ResolveModeFlagBitsKHR::eSampleZero) | VkFlags(ResolveModeFlagBitsKHR::eAverage) | VkFlags(ResolveModeFlagBitsKHR::eMin) | VkFlags(ResolveModeFlagBitsKHR::eMax) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagsKHR value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & ResolveModeFlagBitsKHR::eSampleZero ) result += "SampleZero | "; - if ( value & ResolveModeFlagBitsKHR::eAverage ) result += "Average | "; - if ( value & ResolveModeFlagBitsKHR::eMin ) result += "Min | "; - if ( value & ResolveModeFlagBitsKHR::eMax ) result += "Max | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SampleCountFlagBits - { - e1 = VK_SAMPLE_COUNT_1_BIT, - e2 = VK_SAMPLE_COUNT_2_BIT, - e4 = VK_SAMPLE_COUNT_4_BIT, - e8 = VK_SAMPLE_COUNT_8_BIT, - e16 = VK_SAMPLE_COUNT_16_BIT, - e32 = VK_SAMPLE_COUNT_32_BIT, - e64 = VK_SAMPLE_COUNT_64_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SampleCountFlagBits::e1 : return "1"; - case SampleCountFlagBits::e2 : return "2"; - 
case SampleCountFlagBits::e4 : return "4"; - case SampleCountFlagBits::e8 : return "8"; - case SampleCountFlagBits::e16 : return "16"; - case SampleCountFlagBits::e32 : return "32"; - case SampleCountFlagBits::e64 : return "64"; - default: return "invalid"; - } - } - - using SampleCountFlags = Flags; - - VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) - { - return SampleCountFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits ) - { - return ~( SampleCountFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & SampleCountFlagBits::e1 ) result += "1 | "; - if ( value & SampleCountFlagBits::e2 ) result += "2 | "; - if ( value & SampleCountFlagBits::e4 ) result += "4 | "; - if ( value & SampleCountFlagBits::e8 ) result += "8 | "; - if ( value & SampleCountFlagBits::e16 ) result += "16 | "; - if ( value & SampleCountFlagBits::e32 ) result += "32 | "; - if ( value & SampleCountFlagBits::e64 ) result += "64 | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SamplerCreateFlagBits + template <> + struct StructExtends { - eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, - eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT"; - case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT"; - default: return "invalid"; - } - } - - using SamplerCreateFlags = Flags; - - VULKAN_HPP_INLINE SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) - { - return SamplerCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) - { - return ~( SamplerCreateFlags( bits ) ); - } - - template <> struct FlagTraits + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) - { - if ( !value ) return "{}"; - std::string result; - - if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; - if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SemaphoreCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits ) - { - return "(void)"; - } - - using SemaphoreCreateFlags = Flags; - - VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) - { - return "{}"; - } - - enum class SemaphoreImportFlagBits + template <> + struct StructExtends { - eTemporary 
= VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, - eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SemaphoreImportFlagBits::eTemporary : return "Temporary"; - default: return "invalid"; - } - } - - using SemaphoreImportFlags = Flags; - - VULKAN_HPP_INLINE SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SemaphoreImportFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SemaphoreImportFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary) + value = true }; }; - using SemaphoreImportFlagsKHR = SemaphoreImportFlags; - - VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) + //=== VK_KHR_swapchain === + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ShaderCorePropertiesFlagBitsAMD - {}; - - VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using ShaderCorePropertiesFlagsAMD = Flags; - - VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - - enum class ShaderModuleCreateFlagBits - {}; - - VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using ShaderModuleCreateFlags = Flags; + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) + //=== VK_KHR_display_swapchain === + template <> + struct StructExtends { - return "{}"; - } - - enum class ShaderStageFlagBits - { - eVertex = VK_SHADER_STAGE_VERTEX_BIT, - eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, - eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, - eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, - eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, - eCompute = VK_SHADER_STAGE_COMPUTE_BIT, - eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, - eAll = VK_SHADER_STAGE_ALL, - eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, - eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, - eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, - eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, - eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, - eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, - eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, - eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV - }; - - VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) - { - switch ( value ) - { - case ShaderStageFlagBits::eVertex : return "Vertex"; - case ShaderStageFlagBits::eTessellationControl : return "TessellationControl"; - case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation"; - case ShaderStageFlagBits::eGeometry : return "Geometry"; - case 
ShaderStageFlagBits::eFragment : return "Fragment"; - case ShaderStageFlagBits::eCompute : return "Compute"; - case ShaderStageFlagBits::eAllGraphics : return "AllGraphics"; - case ShaderStageFlagBits::eAll : return "All"; - case ShaderStageFlagBits::eRaygenNV : return "RaygenNV"; - case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV"; - case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV"; - case ShaderStageFlagBits::eMissNV : return "MissNV"; - case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV"; - case ShaderStageFlagBits::eCallableNV : return "CallableNV"; - case ShaderStageFlagBits::eTaskNV : return "TaskNV"; - case ShaderStageFlagBits::eMeshNV : return "MeshNV"; - default: return "invalid"; - } - } - - using ShaderStageFlags = Flags; + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) + //=== VK_EXT_debug_report === + template <> + struct StructExtends { - return ShaderStageFlags( bit0 ) | bit1; - } + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits ) + //=== VK_AMD_rasterization_order === + template <> + struct StructExtends { - return ~( ShaderStageFlags( bits ) ); - } + enum + { + value = true + }; + }; - template <> struct FlagTraits +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + template <> + struct StructExtends { enum { - allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | "; - if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | "; - if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | "; - if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | "; - if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | "; - if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | "; - if ( value & ShaderStageFlagBits::eRaygenNV ) result += "RaygenNV | "; - if ( value & ShaderStageFlagBits::eAnyHitNV ) result += "AnyHitNV | "; - if ( value & ShaderStageFlagBits::eClosestHitNV ) result += "ClosestHitNV | "; - if ( value & ShaderStageFlagBits::eMissNV ) result += "MissNV | "; - if ( value & ShaderStageFlagBits::eIntersectionNV ) result += "IntersectionNV | "; - if ( value & ShaderStageFlagBits::eCallableNV ) result += "CallableNV | "; - if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | "; - if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SparseImageFormatFlagBits + enum + { + value = true 
+ }; + }; + template <> + struct StructExtends { - eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, - eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, - eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail"; - case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize"; - case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize"; - default: return "invalid"; - } - } - - using SparseImageFormatFlags = Flags; - - VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SparseImageFormatFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SparseImageFormatFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | "; - if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | "; - if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SparseMemoryBindFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SparseMemoryBindFlagBits::eMetadata : return "Metadata"; - default: return "invalid"; - } - } - - using SparseMemoryBindFlags = Flags; - - VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SparseMemoryBindFlags( bit0 ) | bit1; - } + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) + //=== VK_NV_dedicated_allocation === + template <> + struct StructExtends { - return ~( SparseMemoryBindFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + enum + { + 
value = true + }; + }; - enum class StencilFaceFlagBits + //=== VK_EXT_transform_feedback === + template <> + struct StructExtends { - eFront = VK_STENCIL_FACE_FRONT_BIT, - eBack = VK_STENCIL_FACE_BACK_BIT, - eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, - eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case StencilFaceFlagBits::eFront : return "Front"; - case StencilFaceFlagBits::eBack : return "Back"; - case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack"; - default: return "invalid"; - } - } - - using StencilFaceFlags = Flags; - - VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return StencilFaceFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( StencilFaceFlags( bits ) ); - } + enum + { + value = true + }; + }; - template <> struct FlagTraits +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + template <> + struct StructExtends { enum { - allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & StencilFaceFlagBits::eFront ) result += "Front | "; - if ( value & StencilFaceFlagBits::eBack ) result += "Back | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - -#ifdef VK_USE_PLATFORM_GGP - enum class StreamDescriptorSurfaceCreateFlagBitsGGP - {}; - - VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using StreamDescriptorSurfaceCreateFlagsGGP = Flags; - - VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } -#endif /*VK_USE_PLATFORM_GGP*/ - - enum class SubgroupFeatureFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, - eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, - eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, - eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, - eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, - eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, - eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, - eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, - ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SubgroupFeatureFlagBits::eBasic : return "Basic"; - case SubgroupFeatureFlagBits::eVote : return "Vote"; - case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic"; - case SubgroupFeatureFlagBits::eBallot : return "Ballot"; - case SubgroupFeatureFlagBits::eShuffle : return "Shuffle"; - case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative"; - case SubgroupFeatureFlagBits::eClustered : return "Clustered"; 
- case SubgroupFeatureFlagBits::eQuad : return "Quad"; - case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV"; - default: return "invalid"; - } - } - - using SubgroupFeatureFlags = Flags; - - VULKAN_HPP_INLINE SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SubgroupFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SubgroupFeatureFlags( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | "; - if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | "; - if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | "; - if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | "; - if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | "; - if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | "; - if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | "; - if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | "; - if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SubpassDescriptionFlagBits + enum + { + value = true + }; + }; + template <> + struct StructExtends { - ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, - ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX"; - case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX"; - default: return "invalid"; - } - } - - using SubpassDescriptionFlags = Flags; - - VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SubpassDescriptionFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SubpassDescriptionFlags( bits ) ); - } + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template <> struct FlagTraits +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h265 === + template <> + struct StructExtends { enum { - 
allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | "; - if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SurfaceCounterFlagBitsEXT + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eVblank = VK_SURFACE_COUNTER_VBLANK_EXT + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank"; - default: return "invalid"; - } - } - - using SurfaceCounterFlagsEXT = Flags; - - VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SurfaceCounterFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SurfaceCounterFlagsEXT( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class SurfaceTransformFlagBitsKHR + enum + { + value = true + }; + }; + template <> + struct StructExtends { - eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, - eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, - eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, - eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, - eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, - eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, - eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, - eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, - eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity"; - case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90"; - case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180"; - case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90"; - case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180"; - case 
SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270"; - case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit"; - default: return "invalid"; - } - } - - using SurfaceTransformFlagsKHR = Flags; - - VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) + value = true + }; + }; + template <> + struct StructExtends { - return SurfaceTransformFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SurfaceTransformFlagsKHR( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | "; - if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | "; - if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | "; - if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - enum class SwapchainCreateFlagBitsKHR +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h264 === + template <> + struct StructExtends { - eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, - eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, - eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + enum + { + value = true + }; }; - - VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) + template <> + struct StructExtends { - switch ( value ) + enum { - case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; - case SwapchainCreateFlagBitsKHR::eProtected : return "Protected"; - case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat"; - default: return "invalid"; - } - } - - using SwapchainCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) + value = true + }; + }; + template <> + struct 
StructExtends { - return SwapchainCreateFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return ~( SwapchainCreateFlagsKHR( bits ) ); - } - - template <> struct FlagTraits + enum + { + value = true + }; + }; + template <> + struct StructExtends { enum { - allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat) + value = true }; }; - - VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) + template <> + struct StructExtends { - if ( !value ) return "{}"; - std::string result; - - if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | "; - if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | "; - if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | "; - return "{ " + result.substr(0, result.size() - 3) + " }"; - } - - enum class ValidationCacheCreateFlagBitsEXT - {}; - - VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using ValidationCacheCreateFlagsEXT = Flags; - - VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } - -#ifdef VK_USE_PLATFORM_VI_NN - enum class ViSurfaceCreateFlagBitsNN - {}; - - VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using ViSurfaceCreateFlagsNN = Flags; - - VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } -#endif /*VK_USE_PLATFORM_VI_NN*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - enum class WaylandSurfaceCreateFlagBitsKHR - {}; - - VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "(void)"; - } - - using WaylandSurfaceCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - enum class Win32SurfaceCreateFlagBitsKHR - {}; - - VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) - { - return "(void)"; - } - - using Win32SurfaceCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) - { - return "{}"; - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - enum class XcbSurfaceCreateFlagBitsKHR - {}; - - VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) - { - return "(void)"; - } - - using XcbSurfaceCreateFlagsKHR = Flags; - - VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) - { - return "{}"; - } -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - enum class XlibSurfaceCreateFlagBitsKHR - {}; - - VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) - { - return "(void)"; - } - - using XlibSurfaceCreateFlagsKHR = Flags; - - 
VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return "{}"; - } -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ -} // namespace VULKAN_HPP_NAMESPACE + enum + { + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -namespace std -{ + //=== VK_AMD_texture_gather_bias_lod === template <> - struct is_error_code_enum : public true_type - {}; -} - -namespace VULKAN_HPP_NAMESPACE -{ -#ifndef VULKAN_HPP_NO_EXCEPTIONS -#if defined(_MSC_VER) && (_MSC_VER == 1800) -# define noexcept _NOEXCEPT -#endif - - class ErrorCategoryImpl : public std::error_category + struct StructExtends { - public: - virtual const char* name() const noexcept override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; } - virtual std::string message(int ev) const override { return to_string(static_cast(ev)); } + enum + { + value = true + }; }; - class Error + //=== VK_KHR_dynamic_rendering === + template <> + struct StructExtends { - public: - virtual ~Error() = default; - - virtual const char* what() const noexcept = 0; + enum + { + value = true + }; }; - - class LogicError : public Error, public std::logic_error + template <> + struct StructExtends { - public: - explicit LogicError( const std::string& what ) - : Error(), std::logic_error(what) {} - explicit LogicError( char const * what ) - : Error(), std::logic_error(what) {} - virtual ~LogicError() = default; - - virtual const char* what() const noexcept { return std::logic_error::what(); } + enum + { + value = true + }; }; - - class SystemError : public Error, public std::system_error + template <> + struct StructExtends { - public: - SystemError( std::error_code ec ) - : Error(), std::system_error(ec) {} - SystemError( std::error_code ec, std::string const& what ) - : Error(), std::system_error(ec, what) {} - SystemError( std::error_code ec, char const * what ) - : Error(), std::system_error(ec, what) {} - SystemError( int ev, std::error_category const& ecat ) - : Error(), std::system_error(ev, ecat) {} - SystemError( int ev, std::error_category const& ecat, std::string const& what) - : Error(), std::system_error(ev, ecat, what) {} - SystemError( int ev, std::error_category const& ecat, char const * what) - : Error(), std::system_error(ev, ecat, what) {} - virtual ~SystemError() = default; - - virtual const char* what() const noexcept { return std::system_error::what(); } + enum + { + value = true + }; }; - -#if defined(_MSC_VER) && (_MSC_VER == 1800) -# undef noexcept -#endif - - VULKAN_HPP_INLINE const std::error_category& errorCategory() + template <> + struct StructExtends { - static ErrorCategoryImpl instance; - return instance; - } - - VULKAN_HPP_INLINE std::error_code make_error_code(Result e) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return std::error_code(static_cast(e), errorCategory()); - } - - VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) + enum + { + value = true + }; + }; + template <> + struct StructExtends { - return std::error_condition(static_cast(e), errorCategory()); - } - - class OutOfHostMemoryError : public SystemError + enum + { + value = true + }; + }; + template <> + struct StructExtends { - public: - OutOfHostMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} - OutOfHostMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + enum + { + value = true + }; }; - 
class OutOfDeviceMemoryError : public SystemError + //=== VK_NV_corner_sampled_image === + template <> + struct StructExtends { - public: - OutOfDeviceMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} - OutOfDeviceMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + enum + { + value = true + }; }; - - class InitializationFailedError : public SystemError + template <> + struct StructExtends { - public: - InitializationFailedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} - InitializationFailedError( char const * message ) - : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + enum + { + value = true + }; }; - class DeviceLostError : public SystemError + //=== VK_NV_external_memory === + template <> + struct StructExtends { - public: - DeviceLostError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} - DeviceLostError( char const * message ) - : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + enum + { + value = true + }; }; - - class MemoryMapFailedError : public SystemError + template <> + struct StructExtends { - public: - MemoryMapFailedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} - MemoryMapFailedError( char const * message ) - : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + enum + { + value = true + }; }; - class LayerNotPresentError : public SystemError +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + template <> + struct StructExtends { - public: - LayerNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} - LayerNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + enum + { + value = true + }; }; - - class ExtensionNotPresentError : public SystemError + template <> + struct StructExtends { - public: - ExtensionNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} - ExtensionNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + enum + { + value = true + }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class FeatureNotPresentError : public SystemError +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + template <> + struct StructExtends { - public: - FeatureNotPresentError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} - FeatureNotPresentError( char const * message ) - : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + enum + { + value = true + }; }; - - class IncompatibleDriverError : public SystemError + template <> + struct StructExtends { - public: - IncompatibleDriverError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} - IncompatibleDriverError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + enum + { + value = true + }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class TooManyObjectsError : public 
SystemError + //=== VK_EXT_validation_flags === + template <> + struct StructExtends { - public: - TooManyObjectsError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} - TooManyObjectsError( char const * message ) - : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + enum + { + value = true + }; }; - class FormatNotSupportedError : public SystemError + //=== VK_EXT_astc_decode_mode === + template <> + struct StructExtends { - public: - FormatNotSupportedError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} - FormatNotSupportedError( char const * message ) - : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + enum + { + value = true + }; }; - - class FragmentedPoolError : public SystemError + template <> + struct StructExtends { - public: - FragmentedPoolError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} - FragmentedPoolError( char const * message ) - : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + enum + { + value = true + }; }; - - class OutOfPoolMemoryError : public SystemError + template <> + struct StructExtends { - public: - OutOfPoolMemoryError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} - OutOfPoolMemoryError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + enum + { + value = true + }; }; - class InvalidExternalHandleError : public SystemError +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + template <> + struct StructExtends { - public: - InvalidExternalHandleError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} - InvalidExternalHandleError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + enum + { + value = true + }; }; - - class SurfaceLostKHRError : public SystemError + template <> + struct StructExtends { - public: - SurfaceLostKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} - SurfaceLostKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + enum + { + value = true + }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class NativeWindowInUseKHRError : public SystemError + //=== VK_KHR_external_memory_fd === + template <> + struct StructExtends { - public: - NativeWindowInUseKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} - NativeWindowInUseKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + enum + { + value = true + }; }; - class OutOfDateKHRError : public SystemError +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + template <> + struct StructExtends { - public: - OutOfDateKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} - OutOfDateKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + enum + { + value = true + }; }; - - class IncompatibleDisplayKHRError : public SystemError + template <> + struct 
StructExtends { - public: - IncompatibleDisplayKHRError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} - IncompatibleDisplayKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + enum + { + value = true + }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class ValidationFailedEXTError : public SystemError +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + template <> + struct StructExtends { - public: - ValidationFailedEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} - ValidationFailedEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + enum + { + value = true + }; }; - - class InvalidShaderNVError : public SystemError + template <> + struct StructExtends { - public: - InvalidShaderNVError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} - InvalidShaderNVError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + enum + { + value = true + }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + //=== VK_KHR_push_descriptor === + template <> + struct StructExtends { - public: - InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} - InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + enum + { + value = true + }; }; - class FragmentationEXTError : public SystemError + //=== VK_EXT_conditional_rendering === + template <> + struct StructExtends { - public: - FragmentationEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} - FragmentationEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + enum + { + value = true + }; }; - - class NotPermittedEXTError : public SystemError + template <> + struct StructExtends { - public: - NotPermittedEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} - NotPermittedEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + enum + { + value = true + }; }; - - class InvalidDeviceAddressEXTError : public SystemError + template <> + struct StructExtends { - public: - InvalidDeviceAddressEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorInvalidDeviceAddressEXT ), message ) {} - InvalidDeviceAddressEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidDeviceAddressEXT ), message ) {} + enum + { + value = true + }; }; - class FullScreenExclusiveModeLostEXTError : public SystemError + //=== VK_KHR_incremental_present === + template <> + struct StructExtends { - public: - FullScreenExclusiveModeLostEXTError( std::string const& message ) - : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} - FullScreenExclusiveModeLostEXTError( char const * message ) - : SystemError( make_error_code( 
Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} - }; - - VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) - { - switch ( result ) - { - case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); - case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); - case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); - case Result::eErrorDeviceLost: throw DeviceLostError( message ); - case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); - case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); - case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); - case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); - case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); - case Result::eErrorTooManyObjects: throw TooManyObjectsError( message ); - case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); - case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); - case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); - case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); - case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); - case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); - case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); - case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); - case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); - case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); - case Result::eErrorFragmentationEXT: throw FragmentationEXTError( message ); - case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message ); - case Result::eErrorInvalidDeviceAddressEXT: throw InvalidDeviceAddressEXTError( message ); - case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); - default: throw SystemError( make_error_code( result ) ); - } - } -#endif - - template void ignore(T const&) {} + enum + { + value = true + }; + }; - template - struct ResultValue + //=== VK_NV_clip_space_w_scaling === + template <> + struct StructExtends { - ResultValue( Result r, T & v ) - : result( r ) - , value( v ) - {} - - ResultValue( Result r, T && v ) - : result( r ) - , value( std::move( v ) ) - {} - - Result result; - T value; - - operator std::tuple() { return std::tuple(result, value); } + enum + { + value = true + }; }; - template - struct ResultValueType + //=== VK_EXT_display_control === + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - typedef ResultValue type; -#else - typedef T type; -#endif + enum + { + value = true + }; }; + //=== VK_GOOGLE_display_timing === template <> - struct ResultValueType + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - typedef Result type; -#else - typedef void type; -#endif + enum + { + value = true + }; }; - VULKAN_HPP_INLINE ResultValueType::type createResultValue( Result result, char const * message ) + //=== VK_NVX_multiview_per_view_attributes === + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); 
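+ // Sketch (not part of this patch): the removed lines around here are vulkan.hpp's error
+ // plumbing -- an std::error_category for vk::Result, one exception class per error code,
+ // throwResultException() to map a failed Result onto the matching class, and
+ // createResultValue(), which either throws or (with VULKAN_HPP_NO_EXCEPTIONS) packs the
+ // code into ResultValue<T>. The snippet below mirrors that shape with a single std
+ // exception; it is an illustration of the pattern, not code from this patch.
+ //
+ //   #include <vulkan/vulkan.hpp>
+ //   #include <stdexcept>
+ //   #include <string>
+ //
+ //   // The pointer-style overload reports status through vk::Result instead of throwing,
+ //   // which makes the result-to-exception mapping explicit.
+ //   static vk::Fence createFenceOrThrow(vk::Device device, const vk::FenceCreateInfo& info)
+ //   {
+ //     vk::Fence fence;
+ //     const vk::Result result = device.createFence(&info, nullptr, &fence);
+ //     if (result != vk::Result::eSuccess)
+ //     {
+ //       // vulkan.hpp would throw the per-code class here (OutOfHostMemoryError,
+ //       // DeviceLostError, ...); one exception carrying to_string(result) shows the idea.
+ //       throw std::runtime_error("vkCreateFence failed: " + vk::to_string(result));
+ //     }
+ //     return fence;
+ //   }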
- return result; -#else - if ( result != Result::eSuccess ) + enum { - throwResultException( result, message ); - } -#endif - } + value = true + }; + }; - template - VULKAN_HPP_INLINE typename ResultValueType::type createResultValue( Result result, T & data, char const * message ) + //=== VK_NV_viewport_swizzle === + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); - return ResultValue( result, std::move( data ) ); -#else - if ( result != Result::eSuccess ) + enum { - throwResultException( result, message ); - } - return std::move( data ); -#endif - } + value = true + }; + }; - VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list successCodes ) + //=== VK_EXT_discard_rectangles === + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); -#else - if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + enum { - throwResultException( result, message ); - } -#endif - return result; - } - - template - VULKAN_HPP_INLINE ResultValue createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) + value = true + }; + }; + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); -#else - if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + enum { - throwResultException( result, message ); - } -#endif - return ResultValue( result, data ); - } + value = true + }; + }; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const& deleter ) + //=== VK_EXT_conservative_rasterization === + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore(message); - VULKAN_HPP_ASSERT( result == Result::eSuccess ); - return ResultValue>( result, UniqueHandle(data, deleter) ); -#else - if ( result != Result::eSuccess ) + enum { - throwResultException( result, message ); - } - return UniqueHandle(data, deleter); -#endif - } -#endif - - struct AccelerationStructureCreateInfoNV; - struct AccelerationStructureInfoNV; - struct AccelerationStructureMemoryRequirementsInfoNV; - struct AcquireNextImageInfoKHR; - struct AllocationCallbacks; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferFormatPropertiesANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferPropertiesANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidHardwareBufferUsageANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct AndroidSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ApplicationInfo; - struct AttachmentDescription; - struct AttachmentDescription2KHR; - struct AttachmentReference; - struct AttachmentReference2KHR; - struct AttachmentSampleLocationsEXT; - struct BaseInStructure; - struct BaseOutStructure; - struct BindAccelerationStructureMemoryInfoNV; - struct BindBufferMemoryDeviceGroupInfo; - using BindBufferMemoryDeviceGroupInfoKHR = 
BindBufferMemoryDeviceGroupInfo; - struct BindBufferMemoryInfo; - using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; - struct BindImageMemoryDeviceGroupInfo; - using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; - struct BindImageMemoryInfo; - using BindImageMemoryInfoKHR = BindImageMemoryInfo; - struct BindImageMemorySwapchainInfoKHR; - struct BindImagePlaneMemoryInfo; - using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; - struct BindSparseInfo; - struct BufferCopy; - struct BufferCreateInfo; - struct BufferDeviceAddressCreateInfoEXT; - struct BufferDeviceAddressInfoEXT; - struct BufferImageCopy; - struct BufferMemoryBarrier; - struct BufferMemoryRequirementsInfo2; - using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; - struct BufferViewCreateInfo; - struct CalibratedTimestampInfoEXT; - struct CheckpointDataNV; - struct ClearAttachment; - union ClearColorValue; - struct ClearDepthStencilValue; - struct ClearRect; - union ClearValue; - struct CmdProcessCommandsInfoNVX; - struct CmdReserveSpaceForCommandsInfoNVX; - struct CoarseSampleLocationNV; - struct CoarseSampleOrderCustomNV; - struct CommandBufferAllocateInfo; - struct CommandBufferBeginInfo; - struct CommandBufferInheritanceConditionalRenderingInfoEXT; - struct CommandBufferInheritanceInfo; - struct CommandPoolCreateInfo; - struct ComponentMapping; - struct ComputePipelineCreateInfo; - struct ConditionalRenderingBeginInfoEXT; - struct ConformanceVersionKHR; - struct CooperativeMatrixPropertiesNV; - struct CopyDescriptorSet; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct D3D12FenceSubmitInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct DebugMarkerMarkerInfoEXT; - struct DebugMarkerObjectNameInfoEXT; - struct DebugMarkerObjectTagInfoEXT; - struct DebugReportCallbackCreateInfoEXT; - struct DebugUtilsLabelEXT; - struct DebugUtilsMessengerCallbackDataEXT; - struct DebugUtilsMessengerCreateInfoEXT; - struct DebugUtilsObjectNameInfoEXT; - struct DebugUtilsObjectTagInfoEXT; - struct DedicatedAllocationBufferCreateInfoNV; - struct DedicatedAllocationImageCreateInfoNV; - struct DedicatedAllocationMemoryAllocateInfoNV; - struct DescriptorBufferInfo; - struct DescriptorImageInfo; - struct DescriptorPoolCreateInfo; - struct DescriptorPoolInlineUniformBlockCreateInfoEXT; - struct DescriptorPoolSize; - struct DescriptorSetAllocateInfo; - struct DescriptorSetLayoutBinding; - struct DescriptorSetLayoutBindingFlagsCreateInfoEXT; - struct DescriptorSetLayoutCreateInfo; - struct DescriptorSetLayoutSupport; - using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; - struct DescriptorSetVariableDescriptorCountAllocateInfoEXT; - struct DescriptorSetVariableDescriptorCountLayoutSupportEXT; - struct DescriptorUpdateTemplateCreateInfo; - using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; - struct DescriptorUpdateTemplateEntry; - using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; - struct DeviceCreateInfo; - struct DeviceEventInfoEXT; - struct DeviceGeneratedCommandsFeaturesNVX; - struct DeviceGeneratedCommandsLimitsNVX; - struct DeviceGroupBindSparseInfo; - using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; - struct DeviceGroupCommandBufferBeginInfo; - using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; - struct DeviceGroupDeviceCreateInfo; - using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; - struct DeviceGroupPresentCapabilitiesKHR; - struct DeviceGroupPresentInfoKHR; - struct 
DeviceGroupRenderPassBeginInfo; - using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; - struct DeviceGroupSubmitInfo; - using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; - struct DeviceGroupSwapchainCreateInfoKHR; - struct DeviceMemoryOverallocationCreateInfoAMD; - struct DeviceQueueCreateInfo; - struct DeviceQueueGlobalPriorityCreateInfoEXT; - struct DeviceQueueInfo2; - struct DispatchIndirectCommand; - struct DisplayEventInfoEXT; - struct DisplayModeCreateInfoKHR; - struct DisplayModeParametersKHR; - struct DisplayModeProperties2KHR; - struct DisplayModePropertiesKHR; - struct DisplayNativeHdrSurfaceCapabilitiesAMD; - struct DisplayPlaneCapabilities2KHR; - struct DisplayPlaneCapabilitiesKHR; - struct DisplayPlaneInfo2KHR; - struct DisplayPlaneProperties2KHR; - struct DisplayPlanePropertiesKHR; - struct DisplayPowerInfoEXT; - struct DisplayPresentInfoKHR; - struct DisplayProperties2KHR; - struct DisplayPropertiesKHR; - struct DisplaySurfaceCreateInfoKHR; - struct DrawIndexedIndirectCommand; - struct DrawIndirectCommand; - struct DrawMeshTasksIndirectCommandNV; - struct DrmFormatModifierPropertiesEXT; - struct DrmFormatModifierPropertiesListEXT; - struct EventCreateInfo; - struct ExportFenceCreateInfo; - using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportFenceWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExportMemoryAllocateInfo; - using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; - struct ExportMemoryAllocateInfoNV; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportMemoryWin32HandleInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExportSemaphoreCreateInfo; - using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ExportSemaphoreWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ExtensionProperties; - struct Extent2D; - struct Extent3D; - struct ExternalBufferProperties; - using ExternalBufferPropertiesKHR = ExternalBufferProperties; - struct ExternalFenceProperties; - using ExternalFencePropertiesKHR = ExternalFenceProperties; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ExternalFormatANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ExternalImageFormatProperties; - using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; - struct ExternalImageFormatPropertiesNV; - struct ExternalMemoryBufferCreateInfo; - using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; - struct ExternalMemoryImageCreateInfo; - using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; - struct ExternalMemoryImageCreateInfoNV; - struct ExternalMemoryProperties; - using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; - struct ExternalSemaphoreProperties; - using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; - struct FenceCreateInfo; - struct FenceGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct FenceGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct FilterCubicImageViewImageFormatPropertiesEXT; - struct FormatProperties; - struct FormatProperties2; - using FormatProperties2KHR = FormatProperties2; - struct FramebufferAttachmentImageInfoKHR; - struct FramebufferAttachmentsCreateInfoKHR; - struct FramebufferCreateInfo; - struct FramebufferMixedSamplesCombinationNV; - struct GeometryAABBNV; - struct GeometryDataNV; - 
struct GeometryNV; - struct GeometryTrianglesNV; - struct GraphicsPipelineCreateInfo; - struct HdrMetadataEXT; - struct HeadlessSurfaceCreateInfoEXT; -#ifdef VK_USE_PLATFORM_IOS_MVK - struct IOSSurfaceCreateInfoMVK; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - struct ImageBlit; - struct ImageCopy; - struct ImageCreateInfo; - struct ImageDrmFormatModifierExplicitCreateInfoEXT; - struct ImageDrmFormatModifierListCreateInfoEXT; - struct ImageDrmFormatModifierPropertiesEXT; - struct ImageFormatListCreateInfoKHR; - struct ImageFormatProperties; - struct ImageFormatProperties2; - using ImageFormatProperties2KHR = ImageFormatProperties2; - struct ImageMemoryBarrier; - struct ImageMemoryRequirementsInfo2; - using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; -#ifdef VK_USE_PLATFORM_FUCHSIA - struct ImagePipeSurfaceCreateInfoFUCHSIA; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct ImagePlaneMemoryRequirementsInfo; - using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; - struct ImageResolve; - struct ImageSparseMemoryRequirementsInfo2; - using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; - struct ImageStencilUsageCreateInfoEXT; - struct ImageSubresource; - struct ImageSubresourceLayers; - struct ImageSubresourceRange; - struct ImageSwapchainCreateInfoKHR; - struct ImageViewASTCDecodeModeEXT; - struct ImageViewCreateInfo; - struct ImageViewHandleInfoNVX; - struct ImageViewUsageCreateInfo; - using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct ImportAndroidHardwareBufferInfoANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ImportFenceFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportFenceWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportMemoryFdInfoKHR; - struct ImportMemoryHostPointerInfoEXT; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportMemoryWin32HandleInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImportSemaphoreFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct ImportSemaphoreWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct IndirectCommandsLayoutCreateInfoNVX; - struct IndirectCommandsLayoutTokenNVX; - struct IndirectCommandsTokenNVX; - struct InitializePerformanceApiInfoINTEL; - struct InputAttachmentAspectReference; - using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; - struct InstanceCreateInfo; - struct LayerProperties; -#ifdef VK_USE_PLATFORM_MACOS_MVK - struct MacOSSurfaceCreateInfoMVK; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - struct MappedMemoryRange; - struct MemoryAllocateFlagsInfo; - using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; - struct MemoryAllocateInfo; - struct MemoryBarrier; - struct MemoryDedicatedAllocateInfo; - using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; - struct MemoryDedicatedRequirements; - using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; - struct MemoryFdPropertiesKHR; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - struct MemoryGetAndroidHardwareBufferInfoANDROID; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct MemoryGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct MemoryHeap; - struct MemoryHostPointerPropertiesEXT; - struct MemoryPriorityAllocateInfoEXT; - struct MemoryRequirements; - struct MemoryRequirements2; - using 
MemoryRequirements2KHR = MemoryRequirements2; - struct MemoryType; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct MemoryWin32HandlePropertiesKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - struct MetalSurfaceCreateInfoEXT; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct MultisamplePropertiesEXT; - struct ObjectTableCreateInfoNVX; - struct ObjectTableDescriptorSetEntryNVX; - struct ObjectTableEntryNVX; - struct ObjectTableIndexBufferEntryNVX; - struct ObjectTablePipelineEntryNVX; - struct ObjectTablePushConstantEntryNVX; - struct ObjectTableVertexBufferEntryNVX; - struct Offset2D; - struct Offset3D; - struct PastPresentationTimingGOOGLE; - struct PerformanceConfigurationAcquireInfoINTEL; - struct PerformanceMarkerInfoINTEL; - struct PerformanceOverrideInfoINTEL; - struct PerformanceStreamMarkerInfoINTEL; - union PerformanceValueDataINTEL; - struct PerformanceValueINTEL; - struct PhysicalDevice16BitStorageFeatures; - using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; - struct PhysicalDevice8BitStorageFeaturesKHR; - struct PhysicalDeviceASTCDecodeFeaturesEXT; - struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; - struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; - struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; - using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; - struct PhysicalDeviceCoherentMemoryFeaturesAMD; - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; - struct PhysicalDeviceConditionalRenderingFeaturesEXT; - struct PhysicalDeviceConservativeRasterizationPropertiesEXT; - struct PhysicalDeviceCooperativeMatrixFeaturesNV; - struct PhysicalDeviceCooperativeMatrixPropertiesNV; - struct PhysicalDeviceCornerSampledImageFeaturesNV; - struct PhysicalDeviceCoverageReductionModeFeaturesNV; - struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; - struct PhysicalDeviceDepthClipEnableFeaturesEXT; - struct PhysicalDeviceDepthStencilResolvePropertiesKHR; - struct PhysicalDeviceDescriptorIndexingFeaturesEXT; - struct PhysicalDeviceDescriptorIndexingPropertiesEXT; - struct PhysicalDeviceDiscardRectanglePropertiesEXT; - struct PhysicalDeviceDriverPropertiesKHR; - struct PhysicalDeviceExclusiveScissorFeaturesNV; - struct PhysicalDeviceExternalBufferInfo; - using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; - struct PhysicalDeviceExternalFenceInfo; - using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; - struct PhysicalDeviceExternalImageFormatInfo; - using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; - struct PhysicalDeviceExternalMemoryHostPropertiesEXT; - struct PhysicalDeviceExternalSemaphoreInfo; - using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; - struct PhysicalDeviceFeatures; - struct PhysicalDeviceFeatures2; - using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; - struct PhysicalDeviceFloatControlsPropertiesKHR; - struct PhysicalDeviceFragmentDensityMapFeaturesEXT; - struct PhysicalDeviceFragmentDensityMapPropertiesEXT; - struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV; - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; - struct PhysicalDeviceGroupProperties; - using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; - struct PhysicalDeviceHostQueryResetFeaturesEXT; - struct PhysicalDeviceIDProperties; - using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; - struct 
PhysicalDeviceImageDrmFormatModifierInfoEXT; - struct PhysicalDeviceImageFormatInfo2; - using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; - struct PhysicalDeviceImageViewImageFormatInfoEXT; - struct PhysicalDeviceImagelessFramebufferFeaturesKHR; - struct PhysicalDeviceIndexTypeUint8FeaturesEXT; - struct PhysicalDeviceInlineUniformBlockFeaturesEXT; - struct PhysicalDeviceInlineUniformBlockPropertiesEXT; - struct PhysicalDeviceLimits; - struct PhysicalDeviceLineRasterizationFeaturesEXT; - struct PhysicalDeviceLineRasterizationPropertiesEXT; - struct PhysicalDeviceMaintenance3Properties; - using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; - struct PhysicalDeviceMemoryBudgetPropertiesEXT; - struct PhysicalDeviceMemoryPriorityFeaturesEXT; - struct PhysicalDeviceMemoryProperties; - struct PhysicalDeviceMemoryProperties2; - using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; - struct PhysicalDeviceMeshShaderFeaturesNV; - struct PhysicalDeviceMeshShaderPropertiesNV; - struct PhysicalDeviceMultiviewFeatures; - using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; - struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - struct PhysicalDeviceMultiviewProperties; - using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; - struct PhysicalDevicePCIBusInfoPropertiesEXT; - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; - struct PhysicalDevicePointClippingProperties; - using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; - struct PhysicalDeviceProperties; - struct PhysicalDeviceProperties2; - using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; - struct PhysicalDeviceProtectedMemoryFeatures; - struct PhysicalDeviceProtectedMemoryProperties; - struct PhysicalDevicePushDescriptorPropertiesKHR; - struct PhysicalDeviceRayTracingPropertiesNV; - struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; - struct PhysicalDeviceSampleLocationsPropertiesEXT; - struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT; - struct PhysicalDeviceSamplerYcbcrConversionFeatures; - using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; - struct PhysicalDeviceScalarBlockLayoutFeaturesEXT; - struct PhysicalDeviceShaderAtomicInt64FeaturesKHR; - struct PhysicalDeviceShaderCoreProperties2AMD; - struct PhysicalDeviceShaderCorePropertiesAMD; - struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; - struct PhysicalDeviceShaderDrawParametersFeatures; - using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; - struct PhysicalDeviceShaderFloat16Int8FeaturesKHR; - using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8FeaturesKHR; - struct PhysicalDeviceShaderImageFootprintFeaturesNV; - struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; - struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; - struct PhysicalDeviceShadingRateImageFeaturesNV; - struct PhysicalDeviceShadingRateImagePropertiesNV; - struct PhysicalDeviceSparseImageFormatInfo2; - using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; - struct PhysicalDeviceSparseProperties; - struct PhysicalDeviceSubgroupProperties; - struct PhysicalDeviceSubgroupSizeControlFeaturesEXT; - struct PhysicalDeviceSubgroupSizeControlPropertiesEXT; - struct PhysicalDeviceSurfaceInfo2KHR; - struct 
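+ // Sketch (not part of this patch): the forward declarations being removed here include the
+ // PhysicalDevice*Features structs that are normally filled through a pNext chain. In
+ // vulkan.hpp releases that provide the templated getFeatures2(), the query looks like the
+ // snippet below; PhysicalDeviceMultiviewFeatures is chosen only because it appears in the
+ // list above, the call itself is not taken from this hunk.
+ //
+ //   #include <vulkan/vulkan.hpp>
+ //
+ //   // The chain variant of getFeatures2 fills PhysicalDeviceFeatures2 and every chained
+ //   // struct in one call.
+ //   static bool supportsMultiview(vk::PhysicalDevice physicalDevice)
+ //   {
+ //     const auto chain =
+ //         physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+ //                                     vk::PhysicalDeviceMultiviewFeatures>();
+ //     return chain.get<vk::PhysicalDeviceMultiviewFeatures>().multiview == VK_TRUE;
+ //   }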
PhysicalDeviceTexelBufferAlignmentFeaturesEXT; - struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT; - struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; - struct PhysicalDeviceTransformFeedbackFeaturesEXT; - struct PhysicalDeviceTransformFeedbackPropertiesEXT; - struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; - struct PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; - struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; - struct PhysicalDeviceVulkanMemoryModelFeaturesKHR; - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; - struct PipelineCacheCreateInfo; - struct PipelineColorBlendAdvancedStateCreateInfoEXT; - struct PipelineColorBlendAttachmentState; - struct PipelineColorBlendStateCreateInfo; - struct PipelineCompilerControlCreateInfoAMD; - struct PipelineCoverageModulationStateCreateInfoNV; - struct PipelineCoverageReductionStateCreateInfoNV; - struct PipelineCoverageToColorStateCreateInfoNV; - struct PipelineCreationFeedbackCreateInfoEXT; - struct PipelineCreationFeedbackEXT; - struct PipelineDepthStencilStateCreateInfo; - struct PipelineDiscardRectangleStateCreateInfoEXT; - struct PipelineDynamicStateCreateInfo; - struct PipelineExecutableInfoKHR; - struct PipelineExecutableInternalRepresentationKHR; - struct PipelineExecutablePropertiesKHR; - struct PipelineExecutableStatisticKHR; - union PipelineExecutableStatisticValueKHR; - struct PipelineInfoKHR; - struct PipelineInputAssemblyStateCreateInfo; - struct PipelineLayoutCreateInfo; - struct PipelineMultisampleStateCreateInfo; - struct PipelineRasterizationConservativeStateCreateInfoEXT; - struct PipelineRasterizationDepthClipStateCreateInfoEXT; - struct PipelineRasterizationLineStateCreateInfoEXT; - struct PipelineRasterizationStateCreateInfo; - struct PipelineRasterizationStateRasterizationOrderAMD; - struct PipelineRasterizationStateStreamCreateInfoEXT; - struct PipelineRepresentativeFragmentTestStateCreateInfoNV; - struct PipelineSampleLocationsStateCreateInfoEXT; - struct PipelineShaderStageCreateInfo; - struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; - struct PipelineTessellationDomainOriginStateCreateInfo; - using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; - struct PipelineTessellationStateCreateInfo; - struct PipelineVertexInputDivisorStateCreateInfoEXT; - struct PipelineVertexInputStateCreateInfo; - struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; - struct PipelineViewportExclusiveScissorStateCreateInfoNV; - struct PipelineViewportShadingRateImageStateCreateInfoNV; - struct PipelineViewportStateCreateInfo; - struct PipelineViewportSwizzleStateCreateInfoNV; - struct PipelineViewportWScalingStateCreateInfoNV; -#ifdef VK_USE_PLATFORM_GGP - struct PresentFrameTokenGGP; -#endif /*VK_USE_PLATFORM_GGP*/ - struct PresentInfoKHR; - struct PresentRegionKHR; - struct PresentRegionsKHR; - struct PresentTimeGOOGLE; - struct PresentTimesInfoGOOGLE; - struct ProtectedSubmitInfo; - struct PushConstantRange; - struct QueryPoolCreateInfo; - struct QueryPoolCreateInfoINTEL; - struct QueueFamilyCheckpointPropertiesNV; - struct QueueFamilyProperties; - struct QueueFamilyProperties2; - using QueueFamilyProperties2KHR = 
QueueFamilyProperties2; - struct RayTracingPipelineCreateInfoNV; - struct RayTracingShaderGroupCreateInfoNV; - struct Rect2D; - struct RectLayerKHR; - struct RefreshCycleDurationGOOGLE; - struct RenderPassAttachmentBeginInfoKHR; - struct RenderPassBeginInfo; - struct RenderPassCreateInfo; - struct RenderPassCreateInfo2KHR; - struct RenderPassFragmentDensityMapCreateInfoEXT; - struct RenderPassInputAttachmentAspectCreateInfo; - using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; - struct RenderPassMultiviewCreateInfo; - using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; - struct RenderPassSampleLocationsBeginInfoEXT; - struct SampleLocationEXT; - struct SampleLocationsInfoEXT; - struct SamplerCreateInfo; - struct SamplerReductionModeCreateInfoEXT; - struct SamplerYcbcrConversionCreateInfo; - using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; - struct SamplerYcbcrConversionImageFormatProperties; - using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; - struct SamplerYcbcrConversionInfo; - using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; - struct SemaphoreCreateInfo; - struct SemaphoreGetFdInfoKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SemaphoreGetWin32HandleInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ShaderModuleCreateInfo; - struct ShaderModuleValidationCacheCreateInfoEXT; - struct ShaderResourceUsageAMD; - struct ShaderStatisticsInfoAMD; - struct ShadingRatePaletteNV; - struct SharedPresentSurfaceCapabilitiesKHR; - struct SparseBufferMemoryBindInfo; - struct SparseImageFormatProperties; - struct SparseImageFormatProperties2; - using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; - struct SparseImageMemoryBind; - struct SparseImageMemoryBindInfo; - struct SparseImageMemoryRequirements; - struct SparseImageMemoryRequirements2; - using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; - struct SparseImageOpaqueMemoryBindInfo; - struct SparseMemoryBind; - struct SpecializationInfo; - struct SpecializationMapEntry; - struct StencilOpState; -#ifdef VK_USE_PLATFORM_GGP - struct StreamDescriptorSurfaceCreateInfoGGP; -#endif /*VK_USE_PLATFORM_GGP*/ - struct SubmitInfo; - struct SubpassBeginInfoKHR; - struct SubpassDependency; - struct SubpassDependency2KHR; - struct SubpassDescription; - struct SubpassDescription2KHR; - struct SubpassDescriptionDepthStencilResolveKHR; - struct SubpassEndInfoKHR; - struct SubpassSampleLocationsEXT; - struct SubresourceLayout; - struct SurfaceCapabilities2EXT; - struct SurfaceCapabilities2KHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceCapabilitiesFullScreenExclusiveEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceCapabilitiesKHR; - struct SurfaceFormat2KHR; - struct SurfaceFormatKHR; -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveInfoEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct SurfaceFullScreenExclusiveWin32InfoEXT; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceProtectedCapabilitiesKHR; - struct SwapchainCounterCreateInfoEXT; - struct SwapchainCreateInfoKHR; - struct SwapchainDisplayNativeHdrCreateInfoAMD; - struct TextureLODGatherFormatPropertiesAMD; - struct ValidationCacheCreateInfoEXT; - struct ValidationFeaturesEXT; - struct ValidationFlagsEXT; - struct VertexInputAttributeDescription; - struct VertexInputBindingDescription; - struct 
VertexInputBindingDivisorDescriptionEXT; -#ifdef VK_USE_PLATFORM_VI_NN - struct ViSurfaceCreateInfoNN; -#endif /*VK_USE_PLATFORM_VI_NN*/ - struct Viewport; - struct ViewportSwizzleNV; - struct ViewportWScalingNV; -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - struct WaylandSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32KeyedMutexAcquireReleaseInfoNV; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - struct Win32SurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct WriteDescriptorSet; - struct WriteDescriptorSetAccelerationStructureNV; - struct WriteDescriptorSetInlineUniformBlockEXT; - struct XYColorEXT; -#ifdef VK_USE_PLATFORM_XCB_KHR - struct XcbSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - struct XlibSurfaceCreateInfoKHR; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - class SurfaceKHR + value = true + }; + }; + template <> + struct StructExtends { - public: - using CType = VkSurfaceKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR; - - public: - VULKAN_HPP_CONSTEXPR SurfaceKHR() - : m_surfaceKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) - : m_surfaceKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) - : m_surfaceKHR( surfaceKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) + enum { - m_surfaceKHR = surfaceKHR; - return *this; - } -#endif + value = true + }; + }; - SurfaceKHR & operator=( std::nullptr_t ) + //=== VK_EXT_depth_clip_enable === + template <> + struct StructExtends + { + enum { - m_surfaceKHR = VK_NULL_HANDLE; - return *this; - } - - bool operator==( SurfaceKHR const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_surfaceKHR == rhs.m_surfaceKHR; - } - - bool operator!=(SurfaceKHR const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_surfaceKHR != rhs.m_surfaceKHR; - } + value = true + }; + }; - bool operator<(SurfaceKHR const & rhs ) const + //=== VK_KHR_shared_presentable_image === + template <> + struct StructExtends + { + enum { - return m_surfaceKHR < rhs.m_surfaceKHR; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + template <> + struct StructExtends + { + enum { - return m_surfaceKHR; - } + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - explicit operator bool() const + //=== VK_KHR_performance_query === + template <> + struct StructExtends + { + enum { - return m_surfaceKHR != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_surfaceKHR == VK_NULL_HANDLE; - } - - private: - VkSurfaceKHR m_surfaceKHR; + value = true + }; }; - static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = SurfaceKHR; + enum + { + value = true + }; }; - - class DebugReportCallbackEXT + template <> + struct StructExtends { - public: - using CType = VkDebugReportCallbackEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT; - - public: - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) - : m_debugReportCallbackEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) - : m_debugReportCallbackEXT( debugReportCallbackEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) + enum { - m_debugReportCallbackEXT = debugReportCallbackEXT; - return *this; - } -#endif - - DebugReportCallbackEXT & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_debugReportCallbackEXT = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DebugReportCallbackEXT const & rhs ) const - { - return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; - } - - bool operator!=(DebugReportCallbackEXT const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; - } + value = true + }; + }; - bool operator<(DebugReportCallbackEXT const & rhs ) const + //=== VK_EXT_debug_utils === + template <> + struct StructExtends + { + enum { - return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + template <> + struct StructExtends + { + enum { - return m_debugReportCallbackEXT; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugReportCallbackEXT != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugReportCallbackEXT == VK_NULL_HANDLE; - } - - private: - VkDebugReportCallbackEXT m_debugReportCallbackEXT; + value = true + }; }; - static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = DebugReportCallbackEXT; + enum + { + value = true + }; }; - - class DebugUtilsMessengerEXT + template <> + struct StructExtends { - public: - using CType = VkDebugUtilsMessengerEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT; - - public: - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) - : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) - : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) + enum { - m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; - return *this; - } -#endif - - DebugUtilsMessengerEXT & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_debugUtilsMessengerEXT = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - bool operator==( DebugUtilsMessengerEXT const & rhs ) const + //=== VK_EXT_sample_locations === + template <> + struct StructExtends + { + enum { - return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; - } - - bool operator!=(DebugUtilsMessengerEXT const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; - } - - bool operator<(DebugUtilsMessengerEXT const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugUtilsMessengerEXT; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_EXT_blend_operation_advanced === + template <> + struct StructExtends + { + enum { - return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; - } - - private: - VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; + value = true + }; }; - static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = DebugUtilsMessengerEXT; + enum + { + value = true + }; }; - - class DisplayKHR + template <> + struct StructExtends { - public: - using CType = VkDisplayKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR; - - public: - VULKAN_HPP_CONSTEXPR DisplayKHR() - : m_displayKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) - : m_displayKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) - : m_displayKHR( displayKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayKHR & operator=(VkDisplayKHR displayKHR) + enum { - m_displayKHR = displayKHR; - return *this; - } -#endif - - DisplayKHR & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_displayKHR = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( DisplayKHR const & rhs ) const + //=== VK_NV_fragment_coverage_to_color === + template <> + struct StructExtends + { + enum { - return m_displayKHR == rhs.m_displayKHR; - } + value = true + }; + }; - bool operator!=(DisplayKHR const & rhs ) const + //=== VK_KHR_acceleration_structure === + template <> + struct StructExtends + { + enum { - return m_displayKHR != rhs.m_displayKHR; - } - - bool operator<(DisplayKHR const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_displayKHR < rhs.m_displayKHR; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_displayKHR; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_displayKHR != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_NV_framebuffer_mixed_samples === + template <> + struct StructExtends + { + enum { - return m_displayKHR == VK_NULL_HANDLE; - } - - private: - VkDisplayKHR m_displayKHR; + value = true + }; }; - static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" 
); + //=== VK_NV_shader_sm_builtins === template <> - struct cpp_type + struct StructExtends { - using type = DisplayKHR; + enum + { + value = true + }; }; - - class SwapchainKHR + template <> + struct StructExtends { - public: - using CType = VkSwapchainKHR; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR; - - public: - VULKAN_HPP_CONSTEXPR SwapchainKHR() - : m_swapchainKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) - : m_swapchainKHR(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) - : m_swapchainKHR( swapchainKHR ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) + enum { - m_swapchainKHR = swapchainKHR; - return *this; - } -#endif - - SwapchainKHR & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_swapchainKHR = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( SwapchainKHR const & rhs ) const + //=== VK_EXT_image_drm_format_modifier === + template <> + struct StructExtends + { + enum { - return m_swapchainKHR == rhs.m_swapchainKHR; - } - - bool operator!=(SwapchainKHR const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_swapchainKHR != rhs.m_swapchainKHR; - } - - bool operator<(SwapchainKHR const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_swapchainKHR < rhs.m_swapchainKHR; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_swapchainKHR; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_swapchainKHR != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_EXT_validation_cache === + template <> + struct StructExtends + { + enum { - return m_swapchainKHR == VK_NULL_HANDLE; - } - - private: - VkSwapchainKHR m_swapchainKHR; + value = true + }; }; - static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" 
); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === template <> - struct cpp_type + struct StructExtends { - using type = SwapchainKHR; + enum + { + value = true + }; }; - - class Semaphore + template <> + struct StructExtends { - public: - using CType = VkSemaphore; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore; - - public: - VULKAN_HPP_CONSTEXPR Semaphore() - : m_semaphore(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) - : m_semaphore(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) - : m_semaphore( semaphore ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Semaphore & operator=(VkSemaphore semaphore) - { - m_semaphore = semaphore; - return *this; - } -#endif - - Semaphore & operator=( std::nullptr_t ) - { - m_semaphore = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Semaphore const & rhs ) const - { - return m_semaphore == rhs.m_semaphore; - } - - bool operator!=(Semaphore const & rhs ) const - { - return m_semaphore != rhs.m_semaphore; - } - - bool operator<(Semaphore const & rhs ) const - { - return m_semaphore < rhs.m_semaphore; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const - { - return m_semaphore; - } - - explicit operator bool() const + enum { - return m_semaphore != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_semaphore == VK_NULL_HANDLE; - } - - private: - VkSemaphore m_semaphore; + value = true + }; }; - static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_NV_shading_rate_image === template <> - struct cpp_type + struct StructExtends { - using type = Semaphore; + enum + { + value = true + }; }; - - class Fence + template <> + struct StructExtends { - public: - using CType = VkFence; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence; - - public: - VULKAN_HPP_CONSTEXPR Fence() - : m_fence(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) - : m_fence(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) - : m_fence( fence ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Fence & operator=(VkFence fence) + enum { - m_fence = fence; - return *this; - } -#endif - - Fence & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_fence = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Fence const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_fence == rhs.m_fence; - } - - bool operator!=(Fence const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_fence != rhs.m_fence; - } + value = true + }; + }; - bool operator<(Fence const & rhs ) const + //=== VK_NV_ray_tracing === + template <> + struct StructExtends + { + enum { - return m_fence < rhs.m_fence; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_fence; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_NV_representative_fragment_test === + template <> + struct StructExtends + { + enum { - return m_fence != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct 
StructExtends + { + enum { - return m_fence == VK_NULL_HANDLE; - } - - private: - VkFence m_fence; + value = true + }; }; - static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = Fence; + enum + { + value = true + }; }; - class PerformanceConfigurationINTEL + //=== VK_EXT_filter_cubic === + template <> + struct StructExtends { - public: - using CType = VkPerformanceConfigurationINTEL; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePerformanceConfigurationINTEL; - - public: - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) - : m_performanceConfigurationINTEL(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) - : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) + enum { - m_performanceConfigurationINTEL = performanceConfigurationINTEL; - return *this; - } -#endif - - PerformanceConfigurationINTEL & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_performanceConfigurationINTEL = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( PerformanceConfigurationINTEL const & rhs ) const + //=== VK_EXT_external_memory_host === + template <> + struct StructExtends + { + enum { - return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; - } - - bool operator!=(PerformanceConfigurationINTEL const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; - } + value = true + }; + }; - bool operator<(PerformanceConfigurationINTEL const & rhs ) const + //=== VK_KHR_shader_clock === + template <> + struct StructExtends + { + enum { - return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_performanceConfigurationINTEL; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_AMD_pipeline_compiler_control === + template <> + struct StructExtends + { + enum { - return m_performanceConfigurationINTEL != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_performanceConfigurationINTEL == VK_NULL_HANDLE; - } - - private: - VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL; + value = true + }; }; - static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" 
); + //=== VK_AMD_shader_core_properties === template <> - struct cpp_type + struct StructExtends { - using type = PerformanceConfigurationINTEL; + enum + { + value = true + }; }; - class QueryPool +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h265 === + template <> + struct StructExtends { - public: - using CType = VkQueryPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool; - - public: - VULKAN_HPP_CONSTEXPR QueryPool() - : m_queryPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) - : m_queryPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) - : m_queryPool( queryPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - QueryPool & operator=(VkQueryPool queryPool) + enum { - m_queryPool = queryPool; - return *this; - } -#endif - - QueryPool & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_queryPool = VK_NULL_HANDLE; - return *this; - } - - bool operator==( QueryPool const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queryPool == rhs.m_queryPool; - } - - bool operator!=(QueryPool const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queryPool != rhs.m_queryPool; - } - - bool operator<(QueryPool const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queryPool < rhs.m_queryPool; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queryPool; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queryPool != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queryPool == VK_NULL_HANDLE; - } - - private: - VkQueryPool m_queryPool; + value = true + }; }; - static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = QueryPool; + enum + { + value = true + }; }; - - class Buffer + template <> + struct StructExtends { - public: - using CType = VkBuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer; - - public: - VULKAN_HPP_CONSTEXPR Buffer() - : m_buffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) - : m_buffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) - : m_buffer( buffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Buffer & operator=(VkBuffer buffer) - { - m_buffer = buffer; - return *this; - } -#endif - - Buffer & operator=( std::nullptr_t ) - { - m_buffer = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Buffer const & rhs ) const - { - return m_buffer == rhs.m_buffer; - } - - bool operator!=(Buffer const & rhs ) const - { - return m_buffer != rhs.m_buffer; - } - - bool operator<(Buffer const & rhs ) const + enum { - return m_buffer < rhs.m_buffer; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_buffer; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_buffer != VK_NULL_HANDLE; - } + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - bool operator!() const + //=== VK_KHR_global_priority === + template <> + struct StructExtends + { + enum { - return m_buffer == VK_NULL_HANDLE; - } - - private: - VkBuffer m_buffer; + value = true + }; }; - static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = Buffer; + enum + { + value = true + }; }; - - class PipelineLayout + template <> + struct StructExtends { - public: - using CType = VkPipelineLayout; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout; - - public: - VULKAN_HPP_CONSTEXPR PipelineLayout() - : m_pipelineLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) - : m_pipelineLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) - : m_pipelineLayout( pipelineLayout ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineLayout & operator=(VkPipelineLayout pipelineLayout) + enum { - m_pipelineLayout = pipelineLayout; - return *this; - } -#endif - - PipelineLayout & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_pipelineLayout = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( PipelineLayout const & rhs ) const + //=== VK_AMD_memory_overallocation_behavior === + template <> + struct StructExtends + { + enum { - return m_pipelineLayout == rhs.m_pipelineLayout; - } + value = true + }; + }; - bool operator!=(PipelineLayout const & rhs ) const + //=== VK_EXT_vertex_attribute_divisor === + template <> + struct StructExtends + { + enum { - return m_pipelineLayout != rhs.m_pipelineLayout; - } - - bool operator<(PipelineLayout const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipelineLayout < rhs.m_pipelineLayout; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipelineLayout; - } - 
- explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipelineLayout != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + template <> + struct StructExtends + { + enum { - return m_pipelineLayout == VK_NULL_HANDLE; - } - - private: - VkPipelineLayout m_pipelineLayout; + value = true + }; }; - static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_GGP*/ + //=== VK_NV_compute_shader_derivatives === template <> - struct cpp_type + struct StructExtends { - using type = PipelineLayout; + enum + { + value = true + }; }; - - class DescriptorSet + template <> + struct StructExtends { - public: - using CType = VkDescriptorSet; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet; - - public: - VULKAN_HPP_CONSTEXPR DescriptorSet() - : m_descriptorSet(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) - : m_descriptorSet(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) - : m_descriptorSet( descriptorSet ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSet & operator=(VkDescriptorSet descriptorSet) + enum { - m_descriptorSet = descriptorSet; - return *this; - } -#endif + value = true + }; + }; - DescriptorSet & operator=( std::nullptr_t ) + //=== VK_NV_mesh_shader === + template <> + struct StructExtends + { + enum { - m_descriptorSet = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorSet const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSet == rhs.m_descriptorSet; - } - - bool operator!=(DescriptorSet const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSet != rhs.m_descriptorSet; - } + value = true + }; + }; - bool operator<(DescriptorSet const & rhs ) const + //=== VK_NV_fragment_shader_barycentric === + template <> + struct StructExtends + { + enum { - return m_descriptorSet < rhs.m_descriptorSet; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSet; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_NV_shader_image_footprint === + template <> + struct StructExtends + { + enum { - return m_descriptorSet != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSet == VK_NULL_HANDLE; - } - - private: - VkDescriptorSet m_descriptorSet; + value = true + }; }; - static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" 
); + //=== VK_NV_scissor_exclusive === template <> - struct cpp_type + struct StructExtends { - using type = DescriptorSet; + enum + { + value = true + }; }; - - class Pipeline + template <> + struct StructExtends { - public: - using CType = VkPipeline; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline; - - public: - VULKAN_HPP_CONSTEXPR Pipeline() - : m_pipeline(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) - : m_pipeline(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) - : m_pipeline( pipeline ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Pipeline & operator=(VkPipeline pipeline) + enum { - m_pipeline = pipeline; - return *this; - } -#endif - - Pipeline & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_pipeline = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( Pipeline const & rhs ) const + //=== VK_NV_device_diagnostic_checkpoints === + template <> + struct StructExtends + { + enum { - return m_pipeline == rhs.m_pipeline; - } + value = true + }; + }; - bool operator!=(Pipeline const & rhs ) const + //=== VK_INTEL_shader_integer_functions2 === + template <> + struct StructExtends + { + enum { - return m_pipeline != rhs.m_pipeline; - } - - bool operator<(Pipeline const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipeline < rhs.m_pipeline; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const + //=== VK_INTEL_performance_query === + template <> + struct StructExtends + { + enum { - return m_pipeline; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_EXT_pci_bus_info === + template <> + struct StructExtends + { + enum { - return m_pipeline != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_AMD_display_native_hdr === + template <> + struct StructExtends + { + enum { - return m_pipeline == VK_NULL_HANDLE; - } - - private: - VkPipeline m_pipeline; + value = true + }; }; - static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = Pipeline; + enum + { + value = true + }; }; - class ImageView + //=== VK_EXT_fragment_density_map === + template <> + struct StructExtends { - public: - using CType = VkImageView; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView; - - public: - VULKAN_HPP_CONSTEXPR ImageView() - : m_imageView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) - : m_imageView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) - : m_imageView( imageView ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ImageView & operator=(VkImageView imageView) - { - m_imageView = imageView; - return *this; - } -#endif - - ImageView & operator=( std::nullptr_t ) - { - m_imageView = VK_NULL_HANDLE; - return *this; - } - - bool operator==( ImageView const & rhs ) const - { - return m_imageView == rhs.m_imageView; - } - - bool operator!=(ImageView const & rhs ) const - { - return m_imageView != rhs.m_imageView; - } - - bool operator<(ImageView const & rhs ) const - { - return m_imageView < rhs.m_imageView; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const + enum { - return m_imageView; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_imageView != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_imageView == VK_NULL_HANDLE; - } - - private: - VkImageView m_imageView; + value = true + }; }; - static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = ImageView; + enum + { + value = true + }; }; - - class Image + template <> + struct StructExtends { - public: - using CType = VkImage; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage; - - public: - VULKAN_HPP_CONSTEXPR Image() - : m_image(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) - : m_image(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) - : m_image( image ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Image & operator=(VkImage image) + enum { - m_image = image; - return *this; - } -#endif + value = true + }; + }; - Image & operator=( std::nullptr_t ) + //=== VK_KHR_fragment_shading_rate === + template <> + struct StructExtends + { + enum { - m_image = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Image const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_image == rhs.m_image; - } - - bool operator!=(Image const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_image != rhs.m_image; - } - - bool operator<(Image const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_image < rhs.m_image; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_image; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_AMD_shader_core_properties2 === + template <> + struct StructExtends + { + enum { - return m_image != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_AMD_device_coherent_memory === + template <> + struct 
StructExtends + { + enum { - return m_image == VK_NULL_HANDLE; - } - - private: - VkImage m_image; + value = true + }; }; - static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = Image; + enum + { + value = true + }; }; - class AccelerationStructureNV + //=== VK_EXT_shader_image_atomic_int64 === + template <> + struct StructExtends { - public: - using CType = VkAccelerationStructureNV; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureNV; - - public: - VULKAN_HPP_CONSTEXPR AccelerationStructureNV() - : m_accelerationStructureNV(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) - : m_accelerationStructureNV(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) - : m_accelerationStructureNV( accelerationStructureNV ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) + enum { - m_accelerationStructureNV = accelerationStructureNV; - return *this; - } -#endif - - AccelerationStructureNV & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_accelerationStructureNV = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( AccelerationStructureNV const & rhs ) const + //=== VK_EXT_memory_budget === + template <> + struct StructExtends + { + enum { - return m_accelerationStructureNV == rhs.m_accelerationStructureNV; - } + value = true + }; + }; - bool operator!=(AccelerationStructureNV const & rhs ) const + //=== VK_EXT_memory_priority === + template <> + struct StructExtends + { + enum { - return m_accelerationStructureNV != rhs.m_accelerationStructureNV; - } - - bool operator<(AccelerationStructureNV const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_accelerationStructureNV < rhs.m_accelerationStructureNV; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_accelerationStructureNV; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_KHR_surface_protected_capabilities === + template <> + struct StructExtends + { + enum { - return m_accelerationStructureNV != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_NV_dedicated_allocation_image_aliasing === + template <> + struct StructExtends + { + enum { - return m_accelerationStructureNV == VK_NULL_HANDLE; - } - - private: - VkAccelerationStructureNV m_accelerationStructureNV; + value = true + }; }; - static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = AccelerationStructureNV; + enum + { + value = true + }; }; - class DescriptorUpdateTemplate + //=== VK_EXT_buffer_device_address === + template <> + struct StructExtends { - public: - using CType = VkDescriptorUpdateTemplate; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate; - - public: - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) - : m_descriptorUpdateTemplate(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) - : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) + enum { - m_descriptorUpdateTemplate = descriptorUpdateTemplate; - return *this; - } -#endif - - DescriptorUpdateTemplate & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_descriptorUpdateTemplate = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorUpdateTemplate const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; - } + value = true + }; + }; - bool operator!=(DescriptorUpdateTemplate const & rhs ) const + //=== VK_EXT_validation_features === + template <> + struct StructExtends + { + enum { - return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; - } + value = true + }; + }; - bool operator<(DescriptorUpdateTemplate const & rhs ) const + //=== VK_KHR_present_wait === + template <> + struct StructExtends + { + enum { - return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorUpdateTemplate; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_NV_cooperative_matrix === + template <> + struct StructExtends + { + enum { - return m_descriptorUpdateTemplate != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorUpdateTemplate == VK_NULL_HANDLE; - } - - private: - VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; + value = true + }; }; - static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = DescriptorUpdateTemplate; + enum + { + value = true + }; }; - using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; - class Event + //=== VK_NV_coverage_reduction_mode === + template <> + struct StructExtends { - public: - using CType = VkEvent; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent; - - public: - VULKAN_HPP_CONSTEXPR Event() - : m_event(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) - : m_event(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) - : m_event( event ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Event & operator=(VkEvent event) - { - m_event = event; - return *this; - } -#endif - - Event & operator=( std::nullptr_t ) - { - m_event = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Event const & rhs ) const - { - return m_event == rhs.m_event; - } - - bool operator!=(Event const & rhs ) const - { - return m_event != rhs.m_event; - } - - bool operator<(Event const & rhs ) const - { - return m_event < rhs.m_event; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const + enum { - return m_event; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_event != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_event == VK_NULL_HANDLE; - } - - private: - VkEvent m_event; + value = true + }; }; - static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" ); + //=== VK_EXT_fragment_shader_interlock === template <> - struct cpp_type + struct StructExtends { - using type = Event; + enum + { + value = true + }; }; - - class CommandBuffer + template <> + struct StructExtends { - public: - using CType = VkCommandBuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer; - - public: - VULKAN_HPP_CONSTEXPR CommandBuffer() - : m_commandBuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) - : m_commandBuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) - : m_commandBuffer( commandBuffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandBuffer & operator=(VkCommandBuffer commandBuffer) + enum { - m_commandBuffer = commandBuffer; - return *this; - } -#endif + value = true + }; + }; - CommandBuffer & operator=( std::nullptr_t ) + //=== VK_EXT_ycbcr_image_arrays === + template <> + struct StructExtends + { + enum { - m_commandBuffer = VK_NULL_HANDLE; - return *this; - } - - bool operator==( CommandBuffer const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandBuffer == rhs.m_commandBuffer; - } + value = true + }; + }; - bool operator!=(CommandBuffer const & rhs ) const + //=== VK_EXT_provoking_vertex === + template <> + struct StructExtends + { + enum { - return m_commandBuffer != rhs.m_commandBuffer; - } - - bool operator<(CommandBuffer const & rhs ) const - { - return m_commandBuffer < rhs.m_commandBuffer; - } - - template - Result begin( const vk::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = Dispatch() ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginConditionalRenderingEXT( const vk::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginQuery( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, Dispatch const &d = Dispatch() ) const; - - template - void beginQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, uint32_t index, Dispatch const &d = Dispatch() ) const; - - template - void beginRenderPass( const vk::RenderPassBeginInfo* pRenderPassBegin, vk::SubpassContents contents, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, vk::SubpassContents contents, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginRenderPass2KHR( const vk::RenderPassBeginInfo* pRenderPassBegin, const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindIndexBuffer( vk::Buffer buffer, vk::DeviceSize offset, vk::IndexType indexType, Dispatch const &d = Dispatch() ) const; - - template - void bindPipeline( vk::PipelineBindPoint pipelineBindPoint, vk::Pipeline pipeline, Dispatch const &d = Dispatch() ) const; - - template - void bindShadingRateImageNV( vk::ImageView imageView, vk::ImageLayout imageLayout, Dispatch const &d = Dispatch() ) const; - - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* 
pBuffers, const vk::DeviceSize* pOffsets, const vk::DeviceSize* pSizes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageBlit* pRegions, vk::Filter filter, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, vk::Filter filter, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void buildAccelerationStructureNV( const vk::AccelerationStructureInfoNV* pInfo, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void clearAttachments( uint32_t attachmentCount, const vk::ClearAttachment* pAttachments, uint32_t rectCount, const vk::ClearRect* pRects, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearColorValue* pColor, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyAccelerationStructureNV( vk::AccelerationStructureNV dst, 
vk::AccelerationStructureNV src, vk::CopyAccelerationStructureModeNV mode, Dispatch const &d = Dispatch() ) const; - - template - void copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferCopy* pRegions, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; - - template - void debugMarkerBeginEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void debugMarkerEndEXT(Dispatch const &d = Dispatch() ) const; - - template - void debugMarkerInsertEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; - - template - void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; - - template - void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; - - template - void dispatchIndirect( 
vk::Buffer buffer, vk::DeviceSize offset, Dispatch const &d = Dispatch() ) const; - - template - void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const; - - template - void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const; - - template - void drawIndexedIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawIndexedIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawIndexedIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, vk::Buffer counterBuffer, vk::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = Dispatch() ) const; - - template - void drawIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawMeshTasksIndirectCountNV( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawMeshTasksIndirectNV( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; - - template - void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = Dispatch() ) const; - - template - void endConditionalRenderingEXT(Dispatch const &d = Dispatch() ) const; - - template - void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const; - - template - void endQuery( vk::QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const; - - template - void endQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = Dispatch() ) const; - - template - void endRenderPass(Dispatch const &d = Dispatch() ) const; - - template - void endRenderPass2KHR( const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, 
ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void executeCommands( uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeCommands( ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void fillBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize size, uint32_t data, Dispatch const &d = Dispatch() ) const; - - template - void insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void nextSubpass( vk::SubpassContents contents, Dispatch const &d = Dispatch() ) const; - - template - void nextSubpass2KHR( const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void processCommandsNVX( const vk::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const 
&d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void pushDescriptorSetWithTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, vk::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = Dispatch() ) const; - - template - void reserveSpaceForCommandsNVX( const vk::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void resetEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const; - - template - void resetQueryPool( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = Dispatch() ) const; - - template - void resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageResolve* pRegions, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setBlendConstants( const float blendConstants[4], Dispatch const &d = Dispatch() ) const; - - template - void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = Dispatch() ) const; - - template - void setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = Dispatch() ) const; - - template - void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = Dispatch() ) const; - - template - void setDeviceMask( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const; - - template - void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const; - - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const vk::Rect2D* pDiscardRectangles, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const; - - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const vk::Rect2D* pExclusiveScissors, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - 
- template - void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = Dispatch() ) const; - - template - void setLineWidth( float lineWidth, Dispatch const &d = Dispatch() ) const; - - template - Result setPerformanceMarkerINTEL( const vk::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result setPerformanceOverrideINTEL( const vk::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result setPerformanceStreamMarkerINTEL( const vk::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setSampleLocationsEXT( const vk::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setScissor( uint32_t firstScissor, uint32_t scissorCount, const vk::Rect2D* pScissors, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setStencilCompareMask( vk::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = Dispatch() ) const; - - template - void setStencilReference( vk::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = Dispatch() ) const; - - template - void setStencilWriteMask( vk::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = Dispatch() ) const; - - template - void setViewport( uint32_t firstViewport, uint32_t viewportCount, const vk::Viewport* pViewports, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWScalingNV( uint32_t 
firstViewport, ArrayProxy viewportWScalings, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void traceRaysNV( vk::Buffer raygenShaderBindingTableBuffer, vk::DeviceSize raygenShaderBindingOffset, vk::Buffer missShaderBindingTableBuffer, vk::DeviceSize missShaderBindingOffset, vk::DeviceSize missShaderBindingStride, vk::Buffer hitShaderBindingTableBuffer, vk::DeviceSize hitShaderBindingOffset, vk::DeviceSize hitShaderBindingStride, vk::Buffer callableShaderBindingTableBuffer, vk::DeviceSize callableShaderBindingOffset, vk::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = Dispatch() ) const; - - template - void updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize dataSize, const void* pData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, ArrayProxy data, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void waitEvents( uint32_t eventCount, const vk::Event* pEvents, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void waitEvents( ArrayProxy events, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const vk::AccelerationStructureNV* pAccelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void writeBufferMarkerAMD( vk::PipelineStageFlagBits pipelineStage, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = Dispatch() ) const; - - template - void writeTimestamp( vk::PipelineStageFlagBits pipelineStage, vk::QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const; - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result end(Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type end(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result reset( vk::CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type reset( vk::CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandBuffer; - } - - explicit operator bool() const + value = 
true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandBuffer != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandBuffer == VK_NULL_HANDLE; - } - - private: - VkCommandBuffer m_commandBuffer; + value = true + }; }; - static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === template <> - struct cpp_type + struct StructExtends { - using type = CommandBuffer; + enum + { + value = true + }; }; - - class DeviceMemory + template <> + struct StructExtends { - public: - using CType = VkDeviceMemory; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory; - - public: - VULKAN_HPP_CONSTEXPR DeviceMemory() - : m_deviceMemory(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) - : m_deviceMemory(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) - : m_deviceMemory( deviceMemory ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DeviceMemory & operator=(VkDeviceMemory deviceMemory) + enum { - m_deviceMemory = deviceMemory; - return *this; - } -#endif - - DeviceMemory & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_deviceMemory = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DeviceMemory const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_deviceMemory == rhs.m_deviceMemory; - } - - bool operator!=(DeviceMemory const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_deviceMemory != rhs.m_deviceMemory; - } + value = true + }; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - bool operator<(DeviceMemory const & rhs ) const + //=== VK_EXT_line_rasterization === + template <> + struct StructExtends + { + enum { - return m_deviceMemory < rhs.m_deviceMemory; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_deviceMemory; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_deviceMemory != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_deviceMemory == VK_NULL_HANDLE; - } - - private: - VkDeviceMemory m_deviceMemory; + value = true + }; }; - static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" 
); + //=== VK_EXT_shader_atomic_float === template <> - struct cpp_type + struct StructExtends { - using type = DeviceMemory; + enum + { + value = true + }; }; - - class BufferView + template <> + struct StructExtends { - public: - using CType = VkBufferView; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView; - - public: - VULKAN_HPP_CONSTEXPR BufferView() - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) - : m_bufferView(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) - : m_bufferView( bufferView ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - BufferView & operator=(VkBufferView bufferView) + enum { - m_bufferView = bufferView; - return *this; - } -#endif + value = true + }; + }; - BufferView & operator=( std::nullptr_t ) + //=== VK_EXT_index_type_uint8 === + template <> + struct StructExtends + { + enum { - m_bufferView = VK_NULL_HANDLE; - return *this; - } - - bool operator==( BufferView const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_bufferView == rhs.m_bufferView; - } + value = true + }; + }; - bool operator!=(BufferView const & rhs ) const + //=== VK_EXT_extended_dynamic_state === + template <> + struct StructExtends + { + enum { - return m_bufferView != rhs.m_bufferView; - } - - bool operator<(BufferView const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_bufferView < rhs.m_bufferView; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const + //=== VK_KHR_pipeline_executable_properties === + template <> + struct StructExtends + { + enum { - return m_bufferView; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_bufferView != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_EXT_shader_atomic_float2 === + template <> + struct StructExtends + { + enum { - return m_bufferView == VK_NULL_HANDLE; - } - - private: - VkBufferView m_bufferView; + value = true + }; }; - static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = BufferView; + enum + { + value = true + }; }; - class CommandPool + //=== VK_NV_device_generated_commands === + template <> + struct StructExtends { - public: - using CType = VkCommandPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool; - - public: - VULKAN_HPP_CONSTEXPR CommandPool() - : m_commandPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) - : m_commandPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) - : m_commandPool( commandPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - CommandPool & operator=(VkCommandPool commandPool) - { - m_commandPool = commandPool; - return *this; - } -#endif - - CommandPool & operator=( std::nullptr_t ) - { - m_commandPool = VK_NULL_HANDLE; - return *this; - } - - bool operator==( CommandPool const & rhs ) const - { - return m_commandPool == rhs.m_commandPool; - } - - bool operator!=(CommandPool const & rhs ) const - { - return m_commandPool != rhs.m_commandPool; - } - - bool operator<(CommandPool const & rhs ) const + enum { - return m_commandPool < rhs.m_commandPool; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandPool; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandPool != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_commandPool == VK_NULL_HANDLE; - } - - private: - VkCommandPool m_commandPool; + value = true + }; }; - static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" 
); + //=== VK_NV_inherited_viewport_scissor === template <> - struct cpp_type + struct StructExtends { - using type = CommandPool; + enum + { + value = true + }; }; - - class PipelineCache + template <> + struct StructExtends { - public: - using CType = VkPipelineCache; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache; - - public: - VULKAN_HPP_CONSTEXPR PipelineCache() - : m_pipelineCache(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) - : m_pipelineCache(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) - : m_pipelineCache( pipelineCache ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PipelineCache & operator=(VkPipelineCache pipelineCache) + enum { - m_pipelineCache = pipelineCache; - return *this; - } -#endif - - PipelineCache & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_pipelineCache = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( PipelineCache const & rhs ) const + //=== VK_EXT_texel_buffer_alignment === + template <> + struct StructExtends + { + enum { - return m_pipelineCache == rhs.m_pipelineCache; - } - - bool operator!=(PipelineCache const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipelineCache != rhs.m_pipelineCache; - } + value = true + }; + }; - bool operator<(PipelineCache const & rhs ) const + //=== VK_QCOM_render_pass_transform === + template <> + struct StructExtends + { + enum { - return m_pipelineCache < rhs.m_pipelineCache; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipelineCache; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_EXT_device_memory_report === + template <> + struct StructExtends + { + enum { - return m_pipelineCache != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_pipelineCache == VK_NULL_HANDLE; - } - - private: - VkPipelineCache m_pipelineCache; + value = true + }; }; - static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = PipelineCache; + enum + { + value = true + }; }; - class DescriptorPool + //=== VK_EXT_robustness2 === + template <> + struct StructExtends { - public: - using CType = VkDescriptorPool; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool; - - public: - VULKAN_HPP_CONSTEXPR DescriptorPool() - : m_descriptorPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) - : m_descriptorPool(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) - : m_descriptorPool( descriptorPool ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorPool & operator=(VkDescriptorPool descriptorPool) + enum { - m_descriptorPool = descriptorPool; - return *this; - } -#endif - - DescriptorPool & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_descriptorPool = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorPool const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorPool == rhs.m_descriptorPool; - } + value = true + }; + }; - bool operator!=(DescriptorPool const & rhs ) const + //=== VK_EXT_custom_border_color === + template <> + struct StructExtends + { + enum { - return m_descriptorPool != rhs.m_descriptorPool; - } - - bool operator<(DescriptorPool const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorPool < rhs.m_descriptorPool; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorPool; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorPool != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_KHR_present_id === + template <> + struct StructExtends + { + enum { - return m_descriptorPool == VK_NULL_HANDLE; - } - - private: - VkDescriptorPool m_descriptorPool; + value = true + }; }; - static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = DescriptorPool; + enum + { + value = true + }; }; - - class DescriptorSetLayout + template <> + struct StructExtends { - public: - using CType = VkDescriptorSetLayout; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout; - - public: - VULKAN_HPP_CONSTEXPR DescriptorSetLayout() - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) - : m_descriptorSetLayout(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) - : m_descriptorSetLayout( descriptorSetLayout ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) + enum { - m_descriptorSetLayout = descriptorSetLayout; - return *this; - } -#endif + value = true + }; + }; - DescriptorSetLayout & operator=( std::nullptr_t ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + template <> + struct StructExtends + { + enum { - m_descriptorSetLayout = VK_NULL_HANDLE; - return *this; - } - - bool operator==( DescriptorSetLayout const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSetLayout == rhs.m_descriptorSetLayout; - } + value = true + }; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - bool operator!=(DescriptorSetLayout const & rhs ) const + //=== VK_NV_device_diagnostics_config === + template <> + struct StructExtends + { + enum { - return m_descriptorSetLayout != rhs.m_descriptorSetLayout; - } - - bool operator<(DescriptorSetLayout const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSetLayout < rhs.m_descriptorSetLayout; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_descriptorSetLayout; - } + value = true + }; + }; - explicit operator bool() const + //=== VK_KHR_synchronization2 === + template <> + struct StructExtends + { + enum { - return m_descriptorSetLayout != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_KHR_shader_subgroup_uniform_control_flow === + template <> + struct StructExtends + { + enum { - return m_descriptorSetLayout == VK_NULL_HANDLE; - } - - private: - VkDescriptorSetLayout m_descriptorSetLayout; + value = true + }; }; - static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = DescriptorSetLayout; + enum + { + value = true + }; }; - class Framebuffer + //=== VK_NV_fragment_shading_rate_enums === + template <> + struct StructExtends { - public: - using CType = VkFramebuffer; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer; - - public: - VULKAN_HPP_CONSTEXPR Framebuffer() - : m_framebuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) - : m_framebuffer(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) - : m_framebuffer( framebuffer ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Framebuffer & operator=(VkFramebuffer framebuffer) - { - m_framebuffer = framebuffer; - return *this; - } -#endif - - Framebuffer & operator=( std::nullptr_t ) - { - m_framebuffer = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Framebuffer const & rhs ) const - { - return m_framebuffer == rhs.m_framebuffer; - } - - bool operator!=(Framebuffer const & rhs ) const - { - return m_framebuffer != rhs.m_framebuffer; - } - - bool operator<(Framebuffer const & rhs ) const - { - return m_framebuffer < rhs.m_framebuffer; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const - { - return m_framebuffer; - } - - explicit operator bool() const + enum { - return m_framebuffer != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_framebuffer == VK_NULL_HANDLE; - } - - private: - VkFramebuffer m_framebuffer; + value = true + }; }; - static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = Framebuffer; + enum + { + value = true + }; }; - - class IndirectCommandsLayoutNVX + template <> + struct StructExtends { - public: - using CType = VkIndirectCommandsLayoutNVX; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNVX; - - public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() - : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) - : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) - : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) + enum { - m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX; - return *this; - } -#endif + value = true + }; + }; - IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) + //=== VK_NV_ray_tracing_motion_blur === + template <> + struct StructExtends + { + enum { - m_indirectCommandsLayoutNVX = VK_NULL_HANDLE; - return *this; - } - - bool operator==( IndirectCommandsLayoutNVX const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX; - } - - bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX; - } - - bool operator<(IndirectCommandsLayoutNVX const & rhs ) const + value = true + }; + }; + template <> + 
struct StructExtends + { + enum { - return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const + //=== VK_EXT_ycbcr_2plane_444_formats === + template <> + struct StructExtends + { + enum { - return m_indirectCommandsLayoutNVX; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_EXT_fragment_density_map2 === + template <> + struct StructExtends + { + enum { - return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE; - } - - private: - VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX; + value = true + }; }; - static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = IndirectCommandsLayoutNVX; + enum + { + value = true + }; }; - - class ObjectTableNVX + template <> + struct StructExtends { - public: - using CType = VkObjectTableNVX; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eObjectTableNVX; - - public: - VULKAN_HPP_CONSTEXPR ObjectTableNVX() - : m_objectTableNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) - : m_objectTableNVX(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) - : m_objectTableNVX( objectTableNVX ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) + enum { - m_objectTableNVX = objectTableNVX; - return *this; - } -#endif + value = true + }; + }; - ObjectTableNVX & operator=( std::nullptr_t ) + //=== VK_QCOM_rotated_copy_commands === + template <> + struct StructExtends + { + enum { - m_objectTableNVX = VK_NULL_HANDLE; - return *this; - } - - bool operator==( ObjectTableNVX const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_objectTableNVX == rhs.m_objectTableNVX; - } + value = true + }; + }; - bool operator!=(ObjectTableNVX const & rhs ) const + //=== VK_KHR_workgroup_memory_explicit_layout === + template <> + struct StructExtends + { + enum { - return m_objectTableNVX != rhs.m_objectTableNVX; - } - - bool operator<(ObjectTableNVX const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_objectTableNVX < rhs.m_objectTableNVX; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const + //=== VK_EXT_4444_formats === + template <> + struct StructExtends + { + enum { - return m_objectTableNVX; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_objectTableNVX != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_ARM_rasterization_order_attachment_access === + template <> + struct StructExtends + { + enum { - return m_objectTableNVX == VK_NULL_HANDLE; - } - - private: - VkObjectTableNVX m_objectTableNVX; + value = true + }; }; - static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = ObjectTableNVX; + enum + { + value = true + }; }; - class RenderPass + //=== VK_EXT_rgba10x6_formats === + template <> + struct StructExtends { - public: - using CType = VkRenderPass; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass; - - public: - VULKAN_HPP_CONSTEXPR RenderPass() - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) - : m_renderPass(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) - : m_renderPass( renderPass ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - RenderPass & operator=(VkRenderPass renderPass) + enum { - m_renderPass = renderPass; - return *this; - } -#endif - - RenderPass & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_renderPass = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( RenderPass const & rhs ) const + //=== VK_KHR_ray_tracing_pipeline === + template <> + struct StructExtends + { + enum { - return m_renderPass == rhs.m_renderPass; - } - - bool operator!=(RenderPass const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_renderPass != rhs.m_renderPass; - } - - bool operator<(RenderPass const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_renderPass < rhs.m_renderPass; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const + //=== VK_KHR_ray_query === + template <> + struct StructExtends + { + enum { - return m_renderPass; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_renderPass != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_VALVE_mutable_descriptor_type === + template <> + struct StructExtends + { + enum { - return m_renderPass == VK_NULL_HANDLE; - } - - private: - VkRenderPass m_renderPass; + value = true + }; }; - static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = RenderPass; + enum + { + value = true + }; }; - - class Sampler + template <> + struct StructExtends { - public: - using CType = VkSampler; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler; - - public: - VULKAN_HPP_CONSTEXPR Sampler() - : m_sampler(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) - : m_sampler(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) - : m_sampler( sampler ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Sampler & operator=(VkSampler sampler) - { - m_sampler = sampler; - return *this; - } -#endif - - Sampler & operator=( std::nullptr_t ) - { - m_sampler = VK_NULL_HANDLE; - return *this; - } - - bool operator==( Sampler const & rhs ) const - { - return m_sampler == rhs.m_sampler; - } - - bool operator!=(Sampler const & rhs ) const - { - return m_sampler != rhs.m_sampler; - } - - bool operator<(Sampler const & rhs ) const - { - return m_sampler < rhs.m_sampler; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const + enum { - return m_sampler; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_sampler != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_EXT_vertex_input_dynamic_state === + template <> + struct StructExtends + { + enum { - return m_sampler == VK_NULL_HANDLE; - } - - private: - VkSampler m_sampler; + value = true + }; }; - static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = Sampler; + enum + { + value = true + }; }; - class SamplerYcbcrConversion + //=== VK_EXT_physical_device_drm === + template <> + struct StructExtends { - public: - using CType = VkSamplerYcbcrConversion; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion; - - public: - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() - : m_samplerYcbcrConversion(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) - : m_samplerYcbcrConversion(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) - : m_samplerYcbcrConversion( samplerYcbcrConversion ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) + enum { - m_samplerYcbcrConversion = samplerYcbcrConversion; - return *this; - } -#endif + value = true + }; + }; - SamplerYcbcrConversion & operator=( std::nullptr_t ) + //=== VK_EXT_depth_clip_control === + template <> + struct StructExtends + { + enum { - m_samplerYcbcrConversion = VK_NULL_HANDLE; - return *this; - } - - bool operator==( SamplerYcbcrConversion const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; - } - - bool operator!=(SamplerYcbcrConversion const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; - } + value = true + }; + }; - bool operator<(SamplerYcbcrConversion const & rhs ) const + //=== VK_EXT_primitive_topology_list_restart === + template <> + struct StructExtends + { + enum { - return m_samplerYcbcrConversion < 
rhs.m_samplerYcbcrConversion; - } - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_samplerYcbcrConversion; - } + value = true + }; + }; - explicit operator bool() const +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + template <> + struct StructExtends + { + enum { - return m_samplerYcbcrConversion != VK_NULL_HANDLE; - } + value = true + }; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - bool operator!() const +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template <> + struct StructExtends + { + enum { - return m_samplerYcbcrConversion == VK_NULL_HANDLE; - } - - private: - VkSamplerYcbcrConversion m_samplerYcbcrConversion; + value = true + }; }; - static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = SamplerYcbcrConversion; + enum + { + value = true + }; }; - using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; - - class ShaderModule + template <> + struct StructExtends { - public: - using CType = VkShaderModule; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule; - - public: - VULKAN_HPP_CONSTEXPR ShaderModule() - : m_shaderModule(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) - : m_shaderModule(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) - : m_shaderModule( shaderModule ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ShaderModule & operator=(VkShaderModule shaderModule) + enum { - m_shaderModule = shaderModule; - return *this; - } -#endif + value = true + }; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - ShaderModule & operator=( std::nullptr_t ) + //=== VK_HUAWEI_subpass_shading === + template <> + struct StructExtends + { + enum { - m_shaderModule = VK_NULL_HANDLE; - return *this; - } - - bool operator==( ShaderModule const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_shaderModule == rhs.m_shaderModule; - } - - bool operator!=(ShaderModule const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_shaderModule != rhs.m_shaderModule; - } - - bool operator<(ShaderModule const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_shaderModule < rhs.m_shaderModule; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const + //=== VK_HUAWEI_invocation_mask === + template <> + struct StructExtends + { + enum { - return m_shaderModule; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_shaderModule != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_NV_external_memory_rdma === + template <> + struct StructExtends + { + enum { - return m_shaderModule == VK_NULL_HANDLE; - } - - private: - VkShaderModule m_shaderModule; + value = true + }; }; - static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" 
); - template <> - struct cpp_type + struct StructExtends { - using type = ShaderModule; + enum + { + value = true + }; }; - class ValidationCacheEXT + //=== VK_EXT_extended_dynamic_state2 === + template <> + struct StructExtends { - public: - using CType = VkValidationCacheEXT; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT; - - public: - VULKAN_HPP_CONSTEXPR ValidationCacheEXT() - : m_validationCacheEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) - : m_validationCacheEXT(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) - : m_validationCacheEXT( validationCacheEXT ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) + enum { - m_validationCacheEXT = validationCacheEXT; - return *this; - } -#endif - - ValidationCacheEXT & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_validationCacheEXT = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( ValidationCacheEXT const & rhs ) const + //=== VK_EXT_color_write_enable === + template <> + struct StructExtends + { + enum { - return m_validationCacheEXT == rhs.m_validationCacheEXT; - } - - bool operator!=(ValidationCacheEXT const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_validationCacheEXT != rhs.m_validationCacheEXT; - } - - bool operator<(ValidationCacheEXT const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_validationCacheEXT < rhs.m_validationCacheEXT; - } + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const + //=== VK_EXT_image_view_min_lod === + template <> + struct StructExtends + { + enum { - return m_validationCacheEXT; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_validationCacheEXT != VK_NULL_HANDLE; - } - - bool operator!() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_validationCacheEXT == VK_NULL_HANDLE; - } - - private: - VkValidationCacheEXT m_validationCacheEXT; + value = true + }; }; - static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" 
); + //=== VK_EXT_multi_draw === template <> - struct cpp_type + struct StructExtends { - using type = ValidationCacheEXT; + enum + { + value = true + }; }; - - class Queue + template <> + struct StructExtends { - public: - using CType = VkQueue; - - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue; - - public: - VULKAN_HPP_CONSTEXPR Queue() - : m_queue(VK_NULL_HANDLE) - {} - - VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) - : m_queue(VK_NULL_HANDLE) - {} - - VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) - : m_queue( queue ) - {} - -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Queue & operator=(VkQueue queue) + enum { - m_queue = queue; - return *this; - } -#endif - - Queue & operator=( std::nullptr_t ) + value = true + }; + }; + template <> + struct StructExtends + { + enum { - m_queue = VK_NULL_HANDLE; - return *this; - } + value = true + }; + }; - bool operator==( Queue const & rhs ) const + //=== VK_EXT_border_color_swizzle === + template <> + struct StructExtends + { + enum { - return m_queue == rhs.m_queue; - } - - bool operator!=(Queue const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queue != rhs.m_queue; - } - - bool operator<(Queue const & rhs ) const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queue < rhs.m_queue; - } - - template - void getCheckpointDataNV( uint32_t* pCheckpointDataCount, vk::CheckpointDataNV* pCheckpointData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getCheckpointDataNV(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindSparse( uint32_t bindInfoCount, const vk::BindSparseInfo* pBindInfo, vk::Fence fence, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type bindSparse( ArrayProxy bindInfo, vk::Fence fence, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const; - - template - void insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result presentKHR( const vk::PresentInfoKHR* pPresentInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = Dispatch() ) const; -#else - template - 
ResultValueType::type setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result submit( uint32_t submitCount, const vk::SubmitInfo* pSubmits, vk::Fence fence, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type submit( ArrayProxy submits, vk::Fence fence, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitIdle(Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type waitIdle(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + value = true + }; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const + //=== VK_EXT_pageable_device_local_memory === + template <> + struct StructExtends + { + enum { - return m_queue; - } - - explicit operator bool() const + value = true + }; + }; + template <> + struct StructExtends + { + enum { - return m_queue != VK_NULL_HANDLE; - } + value = true + }; + }; - bool operator!() const + //=== VK_QCOM_fragment_density_map_offset === + template <> + struct StructExtends + { + enum { - return m_queue == VK_NULL_HANDLE; - } - - private: - VkQueue m_queue; + value = true + }; }; - static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" ); - template <> - struct cpp_type + struct StructExtends { - using type = Queue; + enum + { + value = true + }; }; - -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Device; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueAccelerationStructureNV = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueBuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueBufferView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = PoolFree; }; - using UniqueCommandBuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueCommandPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = PoolFree; }; - using UniqueDescriptorSet = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorSetLayout = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDescriptorUpdateTemplate = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectFree; }; - using UniqueDeviceMemory = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueEvent = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueFence = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueFramebuffer = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueImage = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueImageView = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = 
ObjectDestroy; }; - using UniqueIndirectCommandsLayoutNVX = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueObjectTableNVX = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipeline = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipelineCache = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniquePipelineLayout = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueQueryPool = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueRenderPass = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSampler = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSamplerYcbcrConversion = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSemaphore = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueShaderModule = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSwapchainKHR = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueValidationCacheEXT = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - class Device + template <> + struct StructExtends { - public: - using CType = VkDevice; + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDevice; + //=== VK_NV_linear_color_attachment === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + class DynamicLoader + { public: - VULKAN_HPP_CONSTEXPR Device() - : m_device(VK_NULL_HANDLE) - {} +# ifdef VULKAN_HPP_NO_EXCEPTIONS + DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT +# else + DynamicLoader( std::string const & vulkanLibraryName = {} ) +# endif + { + if ( !vulkanLibraryName.empty() ) + { +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); +# elif defined( _WIN32 ) + m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); +# else +# error unsupported platform +# endif + } + else + { +# if defined( __unix__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } +# elif defined( __APPLE__ ) + m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); +# elif defined( _WIN32 ) + m_library = ::LoadLibraryA( "vulkan-1.dll" ); +# else +# error unsupported platform +# endif + } - VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) - : m_device(VK_NULL_HANDLE) - {} +# ifndef VULKAN_HPP_NO_EXCEPTIONS + if ( m_library == nullptr ) + { + // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the + // scope of this function. 
+ throw std::runtime_error( "Failed to load vulkan library!" ); + } +# endif + } - VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) - : m_device( device ) - {} + DynamicLoader( DynamicLoader const & ) = delete; -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Device & operator=(VkDevice device) + DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) { - m_device = device; - return *this; + other.m_library = nullptr; } -#endif - Device & operator=( std::nullptr_t ) + DynamicLoader & operator=( DynamicLoader const & ) = delete; + + DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT { - m_device = VK_NULL_HANDLE; + std::swap( m_library, other.m_library ); return *this; } - bool operator==( Device const & rhs ) const + ~DynamicLoader() VULKAN_HPP_NOEXCEPT { - return m_device == rhs.m_device; + if ( m_library ) + { +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + dlclose( m_library ); +# elif defined( _WIN32 ) + ::FreeLibrary( m_library ); +# else +# error unsupported platform +# endif + } } - bool operator!=(Device const & rhs ) const + template + T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT { - return m_device != rhs.m_device; +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + return (T)dlsym( m_library, function ); +# elif defined( _WIN32 ) + return ( T )::GetProcAddress( m_library, function ); +# else +# error unsupported platform +# endif } - bool operator<(Device const & rhs ) const + bool success() const VULKAN_HPP_NOEXCEPT { - return m_device < rhs.m_device; + return m_library != nullptr; } -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + private: +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) + void * m_library; +# elif defined( _WIN32 ) + ::HINSTANCE m_library; +# else +# error unsupported platform +# endif + }; +#endif - template - Result acquireNextImage2KHR( const vk::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValue acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValue acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result acquirePerformanceConfigurationINTEL( const vk::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, vk::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type acquirePerformanceConfigurationINTEL( const 
PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateCommandBuffers( const vk::CommandBufferAllocateInfo* pAllocateInfo, vk::CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateDescriptorSets( const vk::DescriptorSetAllocateInfo* pAllocateInfo, vk::DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result allocateMemory( const vk::MemoryAllocateInfo* pAllocateInfo, const vk::AllocationCallbacks* pAllocator, vk::DeviceMemory* pMemory, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const vk::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type 
bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; + using PFN_dummy = void ( * )(); + + class DispatchLoaderDynamic : public DispatchLoaderBase + { + public: + //=== VK_VERSION_1_0 === + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + 
PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage 
= 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + + //=== VK_VERSION_1_2 === + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + 
PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + 
PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; #else - template - ResultValueType::type bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindBufferMemory2( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindBufferMemory2KHR( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; #else - template - ResultValueType::type bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindImageMemory2( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result bindImageMemory2KHR( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d = Dispatch() ) const; + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + 
PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; #else - template - ResultValueType::type compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createAccelerationStructureNV( const vk::AccelerationStructureCreateInfoNV* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBuffer( const vk::BufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Buffer* pBuffer, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createBufferView( const vk::BufferViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::BufferView* pView, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createCommandPool( const vk::CommandPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::CommandPool* pCommandPool, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createComputePipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::ComputePipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValueType::type createComputePipeline( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createComputePipelineUnique( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorPool( const vk::DescriptorPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorPool* pDescriptorPool, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorSetLayout( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorSetLayout* pSetLayout, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorUpdateTemplate( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; 
-#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDescriptorUpdateTemplateKHR( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createEvent( const vk::EventCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Event* pEvent, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createFence( const vk::FenceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createFramebuffer( const vk::FramebufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Framebuffer* pFramebuffer, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createGraphicsPipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::GraphicsPipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - 
typename ResultValueType>::type createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValueType::type createGraphicsPipeline( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createGraphicsPipelineUnique( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImage( const vk::ImageCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Image* pImage, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createImageView( const vk::ImageViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ImageView* pView, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createImageView( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createIndirectCommandsLayoutNVX( const vk::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator 
= nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createObjectTableNVX( const vk::ObjectTableCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ObjectTableNVX* pObjectTable, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineCache( const vk::PipelineCacheCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineCache* pPipelineCache, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createPipelineLayout( const vk::PipelineLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineLayout* pPipelineLayout, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createQueryPool( const vk::QueryPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::QueryPool* pQueryPool, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::RayTracingPipelineCreateInfoNV* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch 
const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValueType::type createRayTracingPipelineNV( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createRayTracingPipelineNVUnique( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass( const vk::RenderPassCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createRenderPass2KHR( const vk::RenderPassCreateInfo2KHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSampler( const vk::SamplerCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Sampler* pSampler, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createSampler( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSamplerYcbcrConversion( 
const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSamplerYcbcrConversionKHR( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSemaphore( const vk::SemaphoreCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Semaphore* pSemaphore, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createShaderModule( const vk::ShaderModuleCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ShaderModule* pShaderModule, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSharedSwapchainsKHR( uint32_t swapchainCount, const vk::SwapchainCreateInfoKHR* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchains, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type 
createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; - template - typename ResultValueType>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createSwapchainKHR( const vk::SwapchainCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchain, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createValidationCacheEXT( const vk::ValidationCacheCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ValidationCacheEXT* pValidationCache, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result debugMarkerSetObjectNameEXT( const vk::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result debugMarkerSetObjectTagEXT( const vk::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyAccelerationStructureNV( vk::AccelerationStructureNV accelerationStructure, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template 
- void destroyAccelerationStructureNV( vk::AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::AccelerationStructureNV accelerationStructure, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::AccelerationStructureNV accelerationStructure, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyBuffer( vk::Buffer buffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBuffer( vk::Buffer buffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Buffer buffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Buffer buffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyBufferView( vk::BufferView bufferView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBufferView( vk::BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::BufferView bufferView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyCommandPool( vk::CommandPool commandPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyCommandPool( vk::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::CommandPool commandPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorPool( vk::DescriptorPool descriptorPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorPool( vk::DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::DescriptorPool descriptorPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorSetLayout( 
vk::DescriptorSetLayout descriptorSetLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::DescriptorSetLayout descriptorSetLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorUpdateTemplate( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplate( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDescriptorUpdateTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyEvent( vk::Event event, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyEvent( vk::Event event, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Event event, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Event event, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyFence( vk::Fence fence, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyFence( vk::Fence fence, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Fence fence, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Fence fence, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyFramebuffer( vk::Framebuffer framebuffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyFramebuffer( vk::Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Framebuffer framebuffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyImage( vk::Image image, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImage( vk::Image image, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Image image, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Image image, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyImageView( vk::ImageView imageView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImageView( vk::ImageView imageView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::ImageView imageView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::ImageView imageView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyIndirectCommandsLayoutNVX( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyIndirectCommandsLayoutNVX( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyObjectTableNVX( vk::ObjectTableNVX objectTable, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyObjectTableNVX( vk::ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - 
template - void destroy( vk::ObjectTableNVX objectTable, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyPipeline( vk::Pipeline pipeline, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipeline( vk::Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Pipeline pipeline, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyPipelineCache( vk::PipelineCache pipelineCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineCache( vk::PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::PipelineCache pipelineCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyPipelineLayout( vk::PipelineLayout pipelineLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineLayout( vk::PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::PipelineLayout pipelineLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyQueryPool( vk::QueryPool queryPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyQueryPool( vk::QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::QueryPool queryPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyRenderPass( vk::RenderPass renderPass, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyRenderPass( vk::RenderPass renderPass, Optional allocator = 
nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::RenderPass renderPass, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySampler( vk::Sampler sampler, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySampler( vk::Sampler sampler, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Sampler sampler, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Sampler sampler, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySamplerYcbcrConversion( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversion( vk::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySamplerYcbcrConversionKHR( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversionKHR( vk::SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySemaphore( vk::Semaphore semaphore, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySemaphore( vk::Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::Semaphore semaphore, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyShaderModule( vk::ShaderModule shaderModule, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyShaderModule( vk::ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::ShaderModule shaderModule, const 
vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySwapchainKHR( vk::SwapchainKHR swapchain, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySwapchainKHR( vk::SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::SwapchainKHR swapchain, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyValidationCacheEXT( vk::ValidationCacheEXT validationCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyValidationCacheEXT( vk::ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::ValidationCacheEXT validationCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitIdle(Dispatch const &d = Dispatch() ) const; + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; #else - template - ResultValueType::type waitIdle(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result displayPowerControlEXT( vk::DisplayKHR display, const vk::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type displayPowerControlEXT( vk::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void freeCommandBuffers( vk::CommandPool commandPool, uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeCommandBuffers( vk::CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void free( vk::CommandPool commandPool, uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( vk::CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result freeDescriptorSets( vk::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type freeDescriptorSets( vk::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result free( vk::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type free( vk::DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void freeMemory( vk::DeviceMemory memory, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeMemory( vk::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void free( vk::DeviceMemory memory, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( vk::DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getAccelerationStructureHandleNV( vk::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getAccelerationStructureHandleNV( vk::AccelerationStructureNV accelerationStructure, ArrayProxy data, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getAccelerationStructureMemoryRequirementsNV( const vk::AccelerationStructureMemoryRequirementsInfoNV* pInfo, vk::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = Dispatch() ) const; - template - StructureChain getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, vk::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const; - template - typename ResultValueType>::type 
getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - DeviceAddress getBufferAddressEXT( const vk::BufferDeviceAddressInfoEXT* pInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfoEXT & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getBufferMemoryRequirements( vk::Buffer buffer, vk::MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements getBufferMemoryRequirements( vk::Buffer buffer, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getBufferMemoryRequirements2( const vk::BufferMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; - template - StructureChain getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getBufferMemoryRequirements2KHR( const vk::BufferMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; - template - StructureChain getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getCalibratedTimestampsEXT( uint32_t timestampCount, const vk::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getDescriptorSetLayoutSupport( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, vk::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; - template - StructureChain getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getDescriptorSetLayoutSupportKHR( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, vk::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) 
const; - template - StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, vk::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, vk::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getGroupPresentCapabilitiesKHR( vk::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getGroupPresentCapabilitiesKHR(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getGroupSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, vk::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - Result getGroupSurfacePresentModesKHR( vk::SurfaceKHR surface, vk::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getGroupSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryCommitment( vk::DeviceMemory memory, vk::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::DeviceSize getMemoryCommitment( vk::DeviceMemory memory, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, vk::Queue* pQueue, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueue2( const vk::DeviceQueueInfo2* pQueueInfo, vk::Queue* pQueue, Dispatch const &d = Dispatch() ) const; 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getEventStatus( vk::Event event, Dispatch const &d = Dispatch() ) const; - - template - Result getFenceFdKHR( const vk::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getFenceStatus( vk::Fence fence, Dispatch const &d = Dispatch() ) const; - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getFenceWin32HandleKHR( const vk::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result getImageDrmFormatModifierPropertiesEXT( vk::Image image, vk::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getImageDrmFormatModifierPropertiesEXT( vk::Image image, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageMemoryRequirements( vk::Image image, vk::MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements getImageMemoryRequirements( vk::Image image, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageMemoryRequirements2( const vk::ImageMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; - template - StructureChain getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageMemoryRequirements2KHR( const vk::ImageMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; - template - StructureChain getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSparseMemoryRequirements( vk::Image image, uint32_t* 
pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getImageSparseMemoryRequirements( vk::Image image, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getImageSparseMemoryRequirements( vk::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSparseMemoryRequirements2( const vk::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSparseMemoryRequirements2KHR( const vk::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getImageSubresourceLayout( vk::Image image, const vk::ImageSubresource* pSubresource, vk::SubresourceLayout* pLayout, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::SubresourceLayout getImageSubresourceLayout( vk::Image image, const ImageSubresource & subresource, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint32_t getImageViewHandleNVX( const vk::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result getMemoryAndroidHardwareBufferANDROID( const vk::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + //=== VK_EXT_debug_report === + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + 
PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - template - Result getMemoryFdKHR( const vk::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, vk::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, vk::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandleKHR( const vk::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_EXT_debug_marker === + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; +#else + PFN_dummy vkGetPhysicalDeviceVideoCapabilitiesKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceVideoFormatPropertiesKHR_placeholder = 0; + PFN_dummy vkCreateVideoSessionKHR_placeholder = 0; + PFN_dummy vkDestroyVideoSessionKHR_placeholder = 0; + PFN_dummy vkGetVideoSessionMemoryRequirementsKHR_placeholder = 0; + PFN_dummy vkBindVideoSessionMemoryKHR_placeholder = 0; + PFN_dummy 
vkCreateVideoSessionParametersKHR_placeholder = 0; + PFN_dummy vkUpdateVideoSessionParametersKHR_placeholder = 0; + PFN_dummy vkDestroyVideoSessionParametersKHR_placeholder = 0; + PFN_dummy vkCmdBeginVideoCodingKHR_placeholder = 0; + PFN_dummy vkCmdEndVideoCodingKHR_placeholder = 0; + PFN_dummy vkCmdControlVideoCodingKHR_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; +#else + PFN_dummy vkCmdDecodeVideoKHR_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_EXT_transform_feedback === + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + + //=== VK_NVX_image_view_handle === + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, vk::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - template - Result getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, vk::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
- - template - Result getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, vk::PerformanceValueINTEL* pValue, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineCacheData( vk::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineCacheData( vk::PipelineCache pipelineCache, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineCacheData( vk::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineExecutableInternalRepresentationsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, vk::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineExecutablePropertiesKHR( const vk::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, vk::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPipelineExecutableStatisticsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, vk::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, vk::DeviceSize stride, 
vk::QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, vk::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getRenderAreaGranularity( vk::RenderPass renderPass, vk::Extent2D* pGranularity, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::Extent2D getRenderAreaGranularity( vk::RenderPass renderPass, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSemaphoreFdKHR( const vk::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result getSemaphoreWin32HandleKHR( const vk::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - template - Result getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = Dispatch() ) const; 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSwapchainImagesKHR( vk::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, vk::Image* pSwapchainImages, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSwapchainStatusKHR( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; - - template - Result getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result importFenceFdKHR( const vk::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result importFenceWin32HandleKHR( const vk::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; +#endif /*VK_USE_PLATFORM_GGP*/ - template - Result importSemaphoreFdKHR( const vk::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result importSemaphoreWin32HandleKHR( const vk::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, 
Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_NV_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - template - Result initializePerformanceApiINTEL( const vk::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags, void** ppData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result mergePipelineCaches( vk::PipelineCache dstCache, uint32_t srcCacheCount, const vk::PipelineCache* pSrcCaches, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type mergePipelineCaches( vk::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const vk::ValidationCacheEXT* pSrcCaches, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result registerEventEXT( const vk::DeviceEventInfoEXT* pDeviceEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result registerDisplayEventEXT( vk::DisplayKHR display, const vk::DisplayEventInfoEXT* pDisplayEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type registerDisplayEventEXT( vk::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result registerObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const; 
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type registerObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; #else - template - ResultValueType::type releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_get_physical_device_properties2 === + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const; +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; #else - template - ResultValueType::type resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetEvent( vk::Event event, Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type resetEvent( vk::Event event, Dispatch const &d = Dispatch() ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result resetFences( uint32_t fenceCount, const vk::Fence* pFences, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type resetFences( ArrayProxy fences, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void resetQueryPoolEXT( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = Dispatch() ) const; - - template - Result setDebugUtilsObjectNameEXT( const vk::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result setDebugUtilsObjectTagEXT( const vk::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result setEvent( vk::Event event, Dispatch const &d = Dispatch() ) const; -#else - template - ResultValueType::type setEvent( vk::Event event, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - template - void setHdrMetadataEXT( uint32_t swapchainCount, const vk::SwapchainKHR* pSwapchains, const vk::HdrMetadataEXT* pMetadata, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_device_group_creation === + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - template - void setLocalDimmingAMD( vk::SwapchainKHR swapChain, vk::Bool32 localDimmingEnable, Dispatch const &d = Dispatch() ) const; + //=== VK_KHR_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - template - void trimCommandPool( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - void trimCommandPoolKHR( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const; + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - template - void uninitializePerformanceApiINTEL(Dispatch const &d = Dispatch() ) const; + //=== VK_KHR_external_semaphore_capabilities === + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - template - void 
unmapMemory( vk::DeviceMemory memory, Dispatch const &d = Dispatch() ) const; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - Result unregisterObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type unregisterObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - template - void updateDescriptorSetWithTemplate( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const; + //=== VK_KHR_push_descriptor === + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - template - void updateDescriptorSetWithTemplateKHR( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const; + //=== VK_EXT_conditional_rendering === + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - template - void updateDescriptorSets( uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const vk::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - template - Result waitForFences( uint32_t fenceCount, const vk::Fence* pFences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result waitForFences( ArrayProxy fences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const - { - return m_device; - } + //=== VK_EXT_direct_mode_display === + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - explicit operator bool() const - { - return m_device != VK_NULL_HANDLE; - } +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + 
PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - bool operator!() const - { - return m_device == VK_NULL_HANDLE; - } + //=== VK_EXT_display_surface_counter === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - private: - VkDevice m_device; - }; - static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" ); + //=== VK_EXT_display_control === + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - template <> - struct cpp_type - { - using type = Device; - }; + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - class DisplayModeKHR - { - public: - using CType = VkDisplayModeKHR; + //=== VK_EXT_discard_rectangles === + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR; + //=== VK_EXT_hdr_metadata === + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - public: - VULKAN_HPP_CONSTEXPR DisplayModeKHR() - : m_displayModeKHR(VK_NULL_HANDLE) - {} + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) - : m_displayModeKHR(VK_NULL_HANDLE) - {} + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) - : m_displayModeKHR( displayModeKHR ) - {} + //=== VK_KHR_external_fence_capabilities === + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) - { - m_displayModeKHR = displayModeKHR; - return *this; - } -#endif +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - DisplayModeKHR & operator=( std::nullptr_t ) - { - m_displayModeKHR = VK_NULL_HANDLE; - return *this; - } + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - bool operator==( DisplayModeKHR const & rhs ) const - { - return m_displayModeKHR == rhs.m_displayModeKHR; - } + //=== VK_KHR_performance_query === + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + 
PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
- bool operator!=(DisplayModeKHR const & rhs ) const
- {
- return m_displayModeKHR != rhs.m_displayModeKHR;
- }
+ //=== VK_KHR_get_surface_capabilities2 ===
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
- bool operator<(DisplayModeKHR const & rhs ) const
- {
- return m_displayModeKHR < rhs.m_displayModeKHR;
- }
+ //=== VK_KHR_get_display_properties2 ===
+ PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+ PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+ PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
- VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const
- {
- return m_displayModeKHR;
- }
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+ //=== VK_MVK_ios_surface ===
+ PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#else
+ PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
- explicit operator bool() const
- {
- return m_displayModeKHR != VK_NULL_HANDLE;
- }
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+ //=== VK_MVK_macos_surface ===
+ PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#else
+ PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
- bool operator!() const
- {
- return m_displayModeKHR == VK_NULL_HANDLE;
- }
+ //=== VK_EXT_debug_utils ===
+ PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+ PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+ PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+ PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+ PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+ PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+ PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+ PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+ PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+ PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+ PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
- private:
- VkDisplayModeKHR m_displayModeKHR;
- };
- static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!"
); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> - struct cpp_type - { - using type = DisplayModeKHR; - }; + //=== VK_EXT_sample_locations === + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDevice = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - class PhysicalDevice - { - public: - using CType = VkPhysicalDevice; + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_sampler_ycbcr_conversion === + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice; + //=== VK_KHR_bind_memory2 === + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - public: - VULKAN_HPP_CONSTEXPR PhysicalDevice() - : m_physicalDevice(VK_NULL_HANDLE) - {} + //=== VK_EXT_image_drm_format_modifier === + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) - : m_physicalDevice(VK_NULL_HANDLE) - 
{} + //=== VK_EXT_validation_cache === + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) - : m_physicalDevice( physicalDevice ) - {} + //=== VK_NV_shading_rate_image === + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) - { - m_physicalDevice = physicalDevice; - return *this; - } -#endif + //=== VK_NV_ray_tracing === + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + + //=== VK_KHR_maintenance3 === + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - PhysicalDevice & operator=( std::nullptr_t ) - { - m_physicalDevice = VK_NULL_HANDLE; - return *this; - } + //=== VK_KHR_draw_indirect_count === + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - bool operator==( PhysicalDevice const & rhs ) const - { - return m_physicalDevice == rhs.m_physicalDevice; - } + //=== VK_EXT_external_memory_host === + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - bool operator!=(PhysicalDevice const & rhs ) const - { - return m_physicalDevice != rhs.m_physicalDevice; - } + //=== VK_AMD_buffer_marker === + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - bool operator<(PhysicalDevice const & rhs ) const - { - return m_physicalDevice < rhs.m_physicalDevice; - } + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - Result acquireXlibDisplayEXT( Display* dpy, vk::DisplayKHR display, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type acquireXlibDisplayEXT( vk::DisplayKHR display, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + //=== VK_NV_mesh_shader === + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; 
+ PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - template - Result createDevice( const vk::DeviceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Device* pDevice, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDevice( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDisplayModeKHR( vk::DisplayKHR display, const vk::DisplayModeCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DisplayModeKHR* pMode, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDisplayModeKHR( vk::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateDeviceLayerProperties(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayModeProperties2KHR( vk::DisplayKHR display, uint32_t* pPropertyCount, vk::DisplayModeProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayModeProperties2KHR( vk::DisplayKHR display, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayModeProperties2KHR( vk::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayModePropertiesKHR( vk::DisplayKHR display, uint32_t* pPropertyCount, vk::DisplayModePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayModePropertiesKHR( vk::DisplayKHR display, Dispatch 
const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayModePropertiesKHR( vk::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneCapabilities2KHR( const vk::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, vk::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneCapabilitiesKHR( vk::DisplayModeKHR mode, uint32_t planeIndex, vk::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getDisplayPlaneCapabilitiesKHR( vk::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, vk::DisplayKHR* pDisplays, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, vk::TimeDomainEXT* pTimeDomains, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, vk::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, vk::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPlaneProperties2KHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, vk::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayProperties2KHR( uint32_t* pPropertyCount, vk::DisplayProperties2KHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayProperties2KHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, vk::DisplayPropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPropertiesKHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalBufferProperties( const vk::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, vk::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalBufferPropertiesKHR( const vk::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, vk::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalFenceProperties( const vk::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, vk::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalFencePropertiesKHR( const vk::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, vk::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, 
Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getExternalImageFormatPropertiesNV( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ExternalMemoryHandleTypeFlagsNV externalHandleType, vk::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getExternalImageFormatPropertiesNV( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalSemaphoreProperties( const vk::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, vk::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getExternalSemaphorePropertiesKHR( const vk::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, vk::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFeatures( vk::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceFeatures getFeatures(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFeatures2( vk::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = Dispatch() ) const; - template - StructureChain getFeatures2(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFeatures2KHR( vk::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = Dispatch() ) const; - template - StructureChain getFeatures2KHR(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFormatProperties( vk::Format format, vk::FormatProperties* pFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::FormatProperties getFormatProperties( vk::Format format, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFormatProperties2( vk::Format format, vk::FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::FormatProperties2 getFormatProperties2( vk::Format format, Dispatch const &d = Dispatch() ) const; - template - StructureChain getFormatProperties2( vk::Format 
format, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getFormatProperties2KHR( vk::Format format, vk::FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::FormatProperties2 getFormatProperties2KHR( vk::Format format, Dispatch const &d = Dispatch() ) const; - template - StructureChain getFormatProperties2KHR( vk::Format format, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getGeneratedCommandsPropertiesNVX( vk::DeviceGeneratedCommandsFeaturesNVX* pFeatures, vk::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getImageFormatProperties( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getImageFormatProperties( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getImageFormatProperties2( const vk::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, vk::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; - template - typename ResultValueType>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getImageFormatProperties2KHR( const vk::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, vk::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; - template - typename ResultValueType>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryProperties( vk::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryProperties2( vk::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = Dispatch() ) const; - template - StructureChain getMemoryProperties2(Dispatch const &d = Dispatch() ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMemoryProperties2KHR( vk::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = Dispatch() ) const; - template - StructureChain getMemoryProperties2KHR(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getMultisamplePropertiesEXT( vk::SampleCountFlagBits samples, vk::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::MultisamplePropertiesEXT getMultisamplePropertiesEXT( vk::SampleCountFlagBits samples, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getPresentRectanglesKHR( vk::SurfaceKHR surface, uint32_t* pRectCount, vk::Rect2D* pRects, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPresentRectanglesKHR( vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getPresentRectanglesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getProperties( vk::PhysicalDeviceProperties* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceProperties getProperties(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getProperties2( vk::PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = Dispatch() ) const; - template - StructureChain getProperties2(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getProperties2KHR( vk::PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - vk::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = Dispatch() ) const; - template - StructureChain getProperties2KHR(Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector 
getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2KHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2KHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, uint32_t* pPropertyCount, vk::SparseImageFormatProperties* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getSparseImageFormatProperties2( const vk::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, vk::SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void getSparseImageFormatProperties2KHR( const vk::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, vk::SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - std::vector getSparseImageFormatProperties2KHR( const 
PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, vk::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceCapabilities2EXT( vk::SurfaceKHR surface, vk::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSurfaceCapabilities2EXT( vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceCapabilities2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, vk::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; - template - typename ResultValueType>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceCapabilitiesKHR( vk::SurfaceKHR surface, vk::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSurfaceCapabilitiesKHR( vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceFormats2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, vk::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceFormatsKHR( vk::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, vk::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfaceFormatsKHR( vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfaceFormatsKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef 
VK_USE_PLATFORM_WIN32_KHR - template - Result getSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_NV_scissor_exclusive === + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - template - Result getSurfacePresentModesKHR( vk::SurfaceKHR surface, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type getSurfacePresentModesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, vk::Bool32* pSupported, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + //=== VK_NV_device_diagnostic_checkpoints === + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = Dispatch() ) const; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_timeline_semaphore === + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + + //=== VK_INTEL_performance_query === + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL 
vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; -#ifdef VK_USE_PLATFORM_XCB_KHR - template - Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ + //=== VK_AMD_display_native_hdr === + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, vk::DisplayKHR* pDisplay, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + + //=== VK_EXT_buffer_device_address === + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + + //=== VK_EXT_tooling_info === + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + + //=== VK_KHR_present_wait === + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; + + //=== VK_NV_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + + //=== VK_NV_coverage_reduction_mode === + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 
0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; #else - template - ResultValueType::type releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const - { - return m_physicalDevice; - } + //=== VK_EXT_headless_surface === + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - explicit operator bool() const - { - return m_physicalDevice != VK_NULL_HANDLE; - } + //=== VK_KHR_buffer_device_address === + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - bool operator!() const - { - return m_physicalDevice == VK_NULL_HANDLE; - } + //=== VK_EXT_line_rasterization === + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - private: - VkPhysicalDevice m_physicalDevice; - }; - static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" ); + //=== VK_EXT_host_query_reset === + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - template <> - struct cpp_type - { - using type = PhysicalDevice; - }; + //=== VK_EXT_extended_dynamic_state === + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; + + //=== VK_KHR_deferred_host_operations === + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Instance; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDebugReportCallbackEXT = UniqueHandle; - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueDebugUtilsMessengerEXT = UniqueHandle; - template class 
UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueSurfaceKHR = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_EXT_acquire_drm_display === + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + + //=== VK_EXT_private_data === + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; +#else + PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class Instance - { - public: - using CType = VkInstance; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; 
+ PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + + //=== VK_EXT_vertex_input_dynamic_state === + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - public: - VULKAN_HPP_CONSTEXPR Instance() - : m_instance(VK_NULL_HANDLE) - {} +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +#else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) - : m_instance(VK_NULL_HANDLE) - {} + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) - : m_instance( instance ) - {} + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; -#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) - Instance & operator=(VkInstance instance) + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state2 === + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy 
vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + + //=== VK_EXT_multi_draw === + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_pageable_device_local_memory === + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; + + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + + public: + DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; + DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; + +#if !defined( VK_NO_PROTOTYPES ) + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. + template + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Device const & device, + DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = + dl.template getProcAddress( "vkGetInstanceProcAddr" ); + PFN_vkGetDeviceProcAddr getDeviceProcAddr = + dl.template getProcAddress( "vkGetDeviceProcAddr" ); + init( static_cast( instance ), + getInstanceProcAddr, + static_cast( device ), + device ? getDeviceProcAddr : nullptr ); + } + + // This interface is designed to be used for per-device function pointers in combination with a linked vulkan + // library. + template + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT { - m_instance = instance; - return *this; + static DynamicLoader dl; + init( instance, device, dl ); } -#endif +#endif // !defined( VK_NO_PROTOTYPES ) - Instance & operator=( std::nullptr_t ) + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT { - m_instance = VK_NULL_HANDLE; - return *this; + init( getInstanceProcAddr ); } - bool operator==( Instance const & rhs ) const + void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT { - return m_instance == rhs.m_instance; + VULKAN_HPP_ASSERT( getInstanceProcAddr ); + + vkGetInstanceProcAddr = getInstanceProcAddr; + + //=== VK_VERSION_1_0 === + vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); + vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( + vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); + vkEnumerateInstanceLayerProperties = + PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumerateInstanceVersion = + PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); } - bool operator!=(Instance const & rhs ) const + // This interface does not require a linked vulkan library. 
+ DispatchLoaderDynamic( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT { - return m_instance != rhs.m_instance; + init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); } - bool operator<(Instance const & rhs ) const + // This interface does not require a linked vulkan library. + void init( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT { - return m_instance < rhs.m_instance; + VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); + vkGetInstanceProcAddr = getInstanceProcAddr; + init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); + if ( device ) + { + init( VULKAN_HPP_NAMESPACE::Device( device ) ); + } } -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - Result createAndroidSurfaceKHR( const vk::AndroidSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT + { + VkInstance instance = static_cast( instanceCpp ); - template - Result createDebugReportCallbackEXT( const vk::DebugReportCallbackCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugReportCallbackEXT* pCallback, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDebugUtilsMessengerEXT( const vk::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createDisplayPlaneSurfaceKHR( const vk::DisplaySurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, 
vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result createHeadlessSurfaceEXT( const vk::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_IOS_MVK - template - Result createIOSSurfaceMVK( const vk::IOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = + PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = + PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = + PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + 
vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( + vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = + PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); 
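// NOTE: the following is an illustrative sketch, not part of the generated header or of this
// patch. Further down in this same init() the loader prefers a core entry point and falls back
// to the equivalent promoted extension when the core name does not resolve (see the
// "if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;" lines for
// VK_AMD_draw_indirect_count, and likewise for VK_KHR_dynamic_rendering). The same pattern in
// isolation, against the plain C API; the helper name LoadDrawIndirectCount is made up.
#include <vulkan/vulkan.h>

static PFN_vkCmdDrawIndirectCount LoadDrawIndirectCount( VkInstance                instance,
                                                         PFN_vkGetInstanceProcAddr getInstanceProcAddr )
{
  // A Vulkan 1.2 (or newer) instance exposes the core name directly.
  PFN_vkCmdDrawIndirectCount fp =
    PFN_vkCmdDrawIndirectCount( getInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
  if ( !fp )
  {
    // Older instances: use the extension entry point, which has an identical signature.
    fp = PFN_vkCmdDrawIndirectCount( getInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
  }
  return fp;  // may still be null if neither the core command nor the extension is available
}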
+ vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = + PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = + PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = + PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = + PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = + PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = + PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( 
vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = + PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = + PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = + PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = + PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = + 
PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = + PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = + PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, 
"vkCmdDispatchBase" ) ); + vkEnumeratePhysicalDeviceGroups = + PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetPhysicalDeviceFeatures2 = + PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = + PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = + PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = + PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, 
"vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( + vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = + PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + vkCreatePrivateDataSlot = + PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = + PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = + PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = + PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( 
instance, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = + PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = + PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = + PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = + PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = + PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = + PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = + PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = + PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = + PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = + PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = + PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = + PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( + vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( + vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( + vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = + PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( 
instance, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkAcquireNextImage2KHR = + PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( + vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = + PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = + PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = + PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = + PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = + PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - template - Result createImagePipeSurfaceFUCHSIA( const vk::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( 
vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - template - Result createMacOSSurfaceMVK( const vk::MacOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = + PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - template - Result createMetalSurfaceEXT( const vk::MetalSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = + PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_GGP - template - Result createStreamDescriptorSurfaceGGP( const vk::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = + PFN_vkCreateWin32SurfaceKHR( 
vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = + PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = + PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = + PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = + PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + vkCreateVideoSessionKHR = + PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = + PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = + PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( + vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = + PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = + PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback 
=== + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = + PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = + PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = + PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = + PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = + PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( + vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); #endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN - template - Result createViSurfaceNN( const vk::ViSurfaceCreateInfoNN* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - 
template - typename ResultValueType>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = + PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = + PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, 
"vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); #endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - Result createWaylandSurfaceKHR( const vk::WaylandSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - Result createWin32SurfaceKHR( const vk::Win32SurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + + //=== VK_KHR_external_semaphore_capabilities === + 
vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - template - Result createXcbSurfaceKHR( const vk::XcbSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = + PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( + vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) 
); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - Result createXlibSurfaceKHR( const vk::XlibSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = + PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = + PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - template - void debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( const 
vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroySurfaceKHR( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySurfaceKHR( vk::SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void destroy( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( vk::SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, vk::PhysicalDevice* pPhysicalDevices, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d = Dispatch() ) const; - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const vk::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = Dispatch() ) const; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, 
const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = Dispatch() ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const - { - return m_instance; - } - - explicit operator bool() const - { - return m_instance != VK_NULL_HANDLE; - } - - bool operator!() const - { - return m_instance == VK_NULL_HANDLE; - } + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = + PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = + PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = + PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = + PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); +#endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ - private: - VkInstance m_instance; - }; - static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" ); + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = + PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = + PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ - template <> - struct cpp_type - { - using type = Instance; - }; +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = + PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template class UniqueHandleTraits { public: using deleter = ObjectDestroy; }; - using UniqueInstance = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - - template - Result createInstance( const vk::InstanceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Instance* pInstance, Dispatch const &d = Dispatch() ); -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ); -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, 
Dispatch const &d = Dispatch() ); -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ); -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = Dispatch() ); - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d = Dispatch() ); -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d = Dispatch() ); - template, typename Dispatch = DispatchLoaderDefault> - typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = Dispatch() ); -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - ResultValueType::type enumerateInstanceVersion(Dispatch const &d = Dispatch() ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - namespace layout - { - struct GeometryTrianglesNV - { - protected: - GeometryTrianglesNV( vk::Buffer vertexData_ = vk::Buffer(), - vk::DeviceSize vertexOffset_ = 0, - uint32_t vertexCount_ = 0, - vk::DeviceSize vertexStride_ = 0, - vk::Format vertexFormat_ = vk::Format::eUndefined, - vk::Buffer indexData_ = vk::Buffer(), - vk::DeviceSize indexOffset_ = 0, - uint32_t indexCount_ = 0, - vk::IndexType indexType_ = vk::IndexType::eUint16, - vk::Buffer transformData_ = vk::Buffer(), - vk::DeviceSize transformOffset_ = 0 ) - : vertexData( vertexData_ ) - , vertexOffset( vertexOffset_ ) - , vertexCount( vertexCount_ ) - , vertexStride( vertexStride_ ) - , vertexFormat( vertexFormat_ ) - , indexData( indexData_ ) - , indexOffset( indexOffset_ ) - , indexCount( indexCount_ ) - , indexType( indexType_ ) - , transformData( transformData_ ) - , transformOffset( transformOffset_ ) - {} - - GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + 
PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCreateDebugUtilsMessengerEXT = + PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = + PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = + PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( + vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( + vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = + PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = 
PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( + vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = + PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = + PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + 
vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( + vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, 
"vkCmdWriteBufferMarkerAMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = + PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + +#if 
defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = + PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - public: - vk::StructureType sType = StructureType::eGeometryTrianglesNV; - const void* pNext = nullptr; - vk::Buffer vertexData; - vk::DeviceSize vertexOffset; - uint32_t vertexCount; - vk::DeviceSize vertexStride; - vk::Format vertexFormat; - vk::Buffer indexData; - vk::DeviceSize indexOffset; - uint32_t indexCount; - vk::IndexType indexType; - vk::Buffer transformData; - vk::DeviceSize transformOffset; - }; - static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "layout struct and wrapper have different size!" ); - } +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = + PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct GeometryTrianglesNV : public layout::GeometryTrianglesNV - { - GeometryTrianglesNV( vk::Buffer vertexData_ = vk::Buffer(), - vk::DeviceSize vertexOffset_ = 0, - uint32_t vertexCount_ = 0, - vk::DeviceSize vertexStride_ = 0, - vk::Format vertexFormat_ = vk::Format::eUndefined, - vk::Buffer indexData_ = vk::Buffer(), - vk::DeviceSize indexOffset_ = 0, - uint32_t indexCount_ = 0, - vk::IndexType indexType_ = vk::IndexType::eUint16, - vk::Buffer transformData_ = vk::Buffer(), - vk::DeviceSize transformOffset_ = 0 ) - : layout::GeometryTrianglesNV( vertexData_, vertexOffset_, vertexCount_, vertexStride_, vertexFormat_, indexData_, indexOffset_, indexCount_, indexType_, transformData_, transformOffset_ ) - {} + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkCmdSetFragmentShadingRateKHR = + PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = 
PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( + vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) - : layout::GeometryTrianglesNV( rhs ) - {} + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = + PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = + PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = + PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = + PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = + PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = + PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = + PFN_vkCmdSetDepthTestEnableEXT( 
vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = + PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = + PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = + PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = + PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = + PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = + PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( + vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = + PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = + PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( + vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( + vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( + vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( 
instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = + PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = + PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = + PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = + PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = + PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + vkCmdWriteBufferMarker2AMD = + PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); + vkGetQueueCheckpointData2NV = + PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = + PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = + PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + 
vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = + PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = + PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = + PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( + vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( + vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = + PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = + PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( + vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - GeometryTrianglesNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = 
PFN_vkImportSemaphoreZirconHandleFUCHSIA( + vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = + PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - GeometryTrianglesNV & setVertexData( vk::Buffer vertexData_ ) - { - vertexData = vertexData_; - return *this; - } +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = + PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( + vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( + vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = + PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( + vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - GeometryTrianglesNV & setVertexOffset( vk::DeviceSize vertexOffset_ ) - { - vertexOffset = vertexOffset_; - return *this; - } + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = + PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = + PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = + PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = + PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( + vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = + PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = + PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = + PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = 
PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = + PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = + PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = + PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( + vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + } + + void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT + { + VkDevice device = static_cast( deviceCpp ); + + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) 
); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = + PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( 
vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + 
vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + 
vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = + PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = + PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = 
PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = + PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = + PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = + PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = + PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = + PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = + PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + 
vkCmdSetStencilTestEnable = + PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = + PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = + PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = + PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = + PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( + vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = + PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, 
"vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = + PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = + PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = + PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = 
PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) - { - vertexCount = vertexCount_; - return *this; - } + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - GeometryTrianglesNV & setVertexStride( vk::DeviceSize vertexStride_ ) - { - vertexStride = vertexStride_; - return *this; - } + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - GeometryTrianglesNV & setVertexFormat( vk::Format vertexFormat_ ) - { - vertexFormat = vertexFormat_; - return *this; - } + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + 
vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = + PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== 
VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = + PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - GeometryTrianglesNV & setIndexData( vk::Buffer indexData_ ) - { - indexData = indexData_; - return *this; - } + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - GeometryTrianglesNV & setIndexOffset( vk::DeviceSize indexOffset_ ) - { - indexOffset = indexOffset_; - return *this; - } - - GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) - { - indexCount = indexCount_; - return *this; - } - - GeometryTrianglesNV & setIndexType( vk::IndexType indexType_ ) - { - indexType = indexType_; - return *this; - } - - GeometryTrianglesNV & setTransformData( vk::Buffer transformData_ ) - { - transformData = transformData_; - return *this; - } - - GeometryTrianglesNV & setTransformOffset( vk::DeviceSize transformOffset_ ) - { - transformOffset = transformOffset_; - return *this; - } - - operator VkGeometryTrianglesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkGeometryTrianglesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( GeometryTrianglesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexData == rhs.vertexData ) - && ( vertexOffset 
== rhs.vertexOffset ) - && ( vertexCount == rhs.vertexCount ) - && ( vertexStride == rhs.vertexStride ) - && ( vertexFormat == rhs.vertexFormat ) - && ( indexData == rhs.indexData ) - && ( indexOffset == rhs.indexOffset ) - && ( indexCount == rhs.indexCount ) - && ( indexType == rhs.indexType ) - && ( transformData == rhs.transformData ) - && ( transformOffset == rhs.transformOffset ); - } - - bool operator!=( GeometryTrianglesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::GeometryTrianglesNV::sType; - }; - static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct GeometryAABBNV - { - protected: - GeometryAABBNV( vk::Buffer aabbData_ = vk::Buffer(), - uint32_t numAABBs_ = 0, - uint32_t stride_ = 0, - vk::DeviceSize offset_ = 0 ) - : aabbData( aabbData_ ) - , numAABBs( numAABBs_ ) - , stride( stride_ ) - , offset( offset_ ) - {} - - GeometryAABBNV( VkGeometryAABBNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eGeometryAabbNV; - const void* pNext = nullptr; - vk::Buffer aabbData; - uint32_t numAABBs; - uint32_t stride; - vk::DeviceSize offset; - }; - static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "layout struct and wrapper have different size!" ); - } - - struct GeometryAABBNV : public layout::GeometryAABBNV - { - GeometryAABBNV( vk::Buffer aabbData_ = vk::Buffer(), - uint32_t numAABBs_ = 0, - uint32_t stride_ = 0, - vk::DeviceSize offset_ = 0 ) - : layout::GeometryAABBNV( aabbData_, numAABBs_, stride_, offset_ ) - {} - - GeometryAABBNV( VkGeometryAABBNV const & rhs ) - : layout::GeometryAABBNV( rhs ) - {} - - GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - GeometryAABBNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - GeometryAABBNV & setAabbData( vk::Buffer aabbData_ ) - { - aabbData = aabbData_; - return *this; - } - - GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) - { - numAABBs = numAABBs_; - return *this; - } - - GeometryAABBNV & setStride( uint32_t stride_ ) - { - stride = stride_; - return *this; - } - - GeometryAABBNV & setOffset( vk::DeviceSize offset_ ) - { - offset = offset_; - return *this; - } - - operator VkGeometryAABBNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkGeometryAABBNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( GeometryAABBNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( aabbData == rhs.aabbData ) - && ( numAABBs == rhs.numAABBs ) - && ( stride == rhs.stride ) - && ( offset == rhs.offset ); - } - - bool operator!=( GeometryAABBNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::GeometryAABBNV::sType; - }; - static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct GeometryDataNV - { - GeometryDataNV( vk::GeometryTrianglesNV triangles_ = vk::GeometryTrianglesNV(), - vk::GeometryAABBNV aabbs_ = vk::GeometryAABBNV() ) - : triangles( triangles_ ) - , aabbs( aabbs_ ) - {} - - GeometryDataNV( VkGeometryDataNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - GeometryDataNV & setTriangles( vk::GeometryTrianglesNV triangles_ ) - { - triangles = triangles_; - return *this; - } - - GeometryDataNV & setAabbs( vk::GeometryAABBNV aabbs_ ) - { - aabbs = aabbs_; - return *this; - } - - operator VkGeometryDataNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkGeometryDataNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( GeometryDataNV const& rhs ) const - { - return ( triangles == rhs.triangles ) - && ( aabbs == rhs.aabbs ); - } - - bool operator!=( GeometryDataNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::GeometryTrianglesNV triangles; - vk::GeometryAABBNV aabbs; - }; - static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct GeometryNV - { - protected: - GeometryNV( vk::GeometryTypeNV geometryType_ = vk::GeometryTypeNV::eTriangles, - vk::GeometryDataNV geometry_ = vk::GeometryDataNV(), - vk::GeometryFlagsNV flags_ = vk::GeometryFlagsNV() ) - : geometryType( geometryType_ ) - , geometry( geometry_ ) - , flags( flags_ ) - {} - - GeometryNV( VkGeometryNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - GeometryNV& operator=( VkGeometryNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eGeometryNV; - const void* pNext = nullptr; - vk::GeometryTypeNV geometryType; - vk::GeometryDataNV geometry; - vk::GeometryFlagsNV flags; - }; - static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "layout struct and wrapper have different size!" 
); - } - - struct GeometryNV : public layout::GeometryNV - { - GeometryNV( vk::GeometryTypeNV geometryType_ = vk::GeometryTypeNV::eTriangles, - vk::GeometryDataNV geometry_ = vk::GeometryDataNV(), - vk::GeometryFlagsNV flags_ = vk::GeometryFlagsNV() ) - : layout::GeometryNV( geometryType_, geometry_, flags_ ) - {} - - GeometryNV( VkGeometryNV const & rhs ) - : layout::GeometryNV( rhs ) - {} - - GeometryNV& operator=( VkGeometryNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - GeometryNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - GeometryNV & setGeometryType( vk::GeometryTypeNV geometryType_ ) - { - geometryType = geometryType_; - return *this; - } - - GeometryNV & setGeometry( vk::GeometryDataNV geometry_ ) - { - geometry = geometry_; - return *this; - } - - GeometryNV & setFlags( vk::GeometryFlagsNV flags_ ) - { - flags = flags_; - return *this; - } - - operator VkGeometryNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkGeometryNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( GeometryNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( geometryType == rhs.geometryType ) - && ( geometry == rhs.geometry ) - && ( flags == rhs.flags ); - } - - bool operator!=( GeometryNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::GeometryNV::sType; - }; - static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct AccelerationStructureInfoNV - { - protected: - AccelerationStructureInfoNV( vk::AccelerationStructureTypeNV type_ = vk::AccelerationStructureTypeNV::eTopLevel, - vk::BuildAccelerationStructureFlagsNV flags_ = vk::BuildAccelerationStructureFlagsNV(), - uint32_t instanceCount_ = 0, - uint32_t geometryCount_ = 0, - const vk::GeometryNV* pGeometries_ = nullptr ) - : type( type_ ) - , flags( flags_ ) - , instanceCount( instanceCount_ ) - , geometryCount( geometryCount_ ) - , pGeometries( pGeometries_ ) - {} - - AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAccelerationStructureInfoNV; - const void* pNext = nullptr; - vk::AccelerationStructureTypeNV type; - vk::BuildAccelerationStructureFlagsNV flags; - uint32_t instanceCount; - uint32_t geometryCount; - const vk::GeometryNV* pGeometries; - }; - static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct AccelerationStructureInfoNV : public layout::AccelerationStructureInfoNV - { - AccelerationStructureInfoNV( vk::AccelerationStructureTypeNV type_ = vk::AccelerationStructureTypeNV::eTopLevel, - vk::BuildAccelerationStructureFlagsNV flags_ = vk::BuildAccelerationStructureFlagsNV(), - uint32_t instanceCount_ = 0, - uint32_t geometryCount_ = 0, - const vk::GeometryNV* pGeometries_ = nullptr ) - : layout::AccelerationStructureInfoNV( type_, flags_, instanceCount_, geometryCount_, pGeometries_ ) - {} - - AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) - : layout::AccelerationStructureInfoNV( rhs ) - {} - - AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AccelerationStructureInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AccelerationStructureInfoNV & setType( vk::AccelerationStructureTypeNV type_ ) - { - type = type_; - return *this; - } - - AccelerationStructureInfoNV & setFlags( vk::BuildAccelerationStructureFlagsNV flags_ ) - { - flags = flags_; - return *this; - } - - AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) - { - instanceCount = instanceCount_; - return *this; - } - - AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) - { - geometryCount = geometryCount_; - return *this; - } - - AccelerationStructureInfoNV & setPGeometries( const vk::GeometryNV* pGeometries_ ) - { - pGeometries = pGeometries_; - return *this; - } - - operator VkAccelerationStructureInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAccelerationStructureInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AccelerationStructureInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( instanceCount == rhs.instanceCount ) - && ( geometryCount == rhs.geometryCount ) - && ( pGeometries == rhs.pGeometries ); - } - - bool operator!=( AccelerationStructureInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AccelerationStructureInfoNV::sType; - }; - static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct AccelerationStructureCreateInfoNV - { - protected: - AccelerationStructureCreateInfoNV( vk::DeviceSize compactedSize_ = 0, - vk::AccelerationStructureInfoNV info_ = vk::AccelerationStructureInfoNV() ) - : compactedSize( compactedSize_ ) - , info( info_ ) - {} - - AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; - const void* pNext = nullptr; - vk::DeviceSize compactedSize; - vk::AccelerationStructureInfoNV info; - }; - static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct AccelerationStructureCreateInfoNV : public layout::AccelerationStructureCreateInfoNV - { - AccelerationStructureCreateInfoNV( vk::DeviceSize compactedSize_ = 0, - vk::AccelerationStructureInfoNV info_ = vk::AccelerationStructureInfoNV() ) - : layout::AccelerationStructureCreateInfoNV( compactedSize_, info_ ) - {} - - AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) - : layout::AccelerationStructureCreateInfoNV( rhs ) - {} - - AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AccelerationStructureCreateInfoNV & setCompactedSize( vk::DeviceSize compactedSize_ ) - { - compactedSize = compactedSize_; - return *this; - } - - AccelerationStructureCreateInfoNV & setInfo( vk::AccelerationStructureInfoNV info_ ) - { - info = info_; - return *this; - } - - operator VkAccelerationStructureCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAccelerationStructureCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( compactedSize == rhs.compactedSize ) - && ( info == rhs.info ); - } - - bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AccelerationStructureCreateInfoNV::sType; - }; - static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct AccelerationStructureMemoryRequirementsInfoNV - { - protected: - AccelerationStructureMemoryRequirementsInfoNV( vk::AccelerationStructureMemoryRequirementsTypeNV type_ = vk::AccelerationStructureMemoryRequirementsTypeNV::eObject, - vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV() ) - : type( type_ ) - , accelerationStructure( accelerationStructure_ ) - {} - - AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; - const void* pNext = nullptr; - vk::AccelerationStructureMemoryRequirementsTypeNV type; - vk::AccelerationStructureNV accelerationStructure; - }; - static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct AccelerationStructureMemoryRequirementsInfoNV : public layout::AccelerationStructureMemoryRequirementsInfoNV - { - AccelerationStructureMemoryRequirementsInfoNV( vk::AccelerationStructureMemoryRequirementsTypeNV type_ = vk::AccelerationStructureMemoryRequirementsTypeNV::eObject, - vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV() ) - : layout::AccelerationStructureMemoryRequirementsInfoNV( type_, accelerationStructure_ ) - {} - - AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) - : layout::AccelerationStructureMemoryRequirementsInfoNV( rhs ) - {} - - AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AccelerationStructureMemoryRequirementsInfoNV & setType( vk::AccelerationStructureMemoryRequirementsTypeNV type_ ) - { - type = type_; - return *this; - } - - AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( vk::AccelerationStructureNV accelerationStructure_ ) - { - accelerationStructure = accelerationStructure_; - return *this; - } - - operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAccelerationStructureMemoryRequirementsInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( accelerationStructure == rhs.accelerationStructure ); - } - - bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AccelerationStructureMemoryRequirementsInfoNV::sType; - }; - static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct AcquireNextImageInfoKHR - { - protected: - AcquireNextImageInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(), - uint64_t timeout_ = 0, - vk::Semaphore semaphore_ = vk::Semaphore(), - vk::Fence fence_ = vk::Fence(), - uint32_t deviceMask_ = 0 ) - : swapchain( swapchain_ ) - , timeout( timeout_ ) - , semaphore( semaphore_ ) - , fence( fence_ ) - , deviceMask( deviceMask_ ) - {} - - AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAcquireNextImageInfoKHR; - const void* pNext = nullptr; - vk::SwapchainKHR swapchain; - uint64_t timeout; - vk::Semaphore semaphore; - vk::Fence fence; - uint32_t deviceMask; - }; - static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct AcquireNextImageInfoKHR : public layout::AcquireNextImageInfoKHR - { - AcquireNextImageInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(), - uint64_t timeout_ = 0, - vk::Semaphore semaphore_ = vk::Semaphore(), - vk::Fence fence_ = vk::Fence(), - uint32_t deviceMask_ = 0 ) - : layout::AcquireNextImageInfoKHR( swapchain_, timeout_, semaphore_, fence_, deviceMask_ ) - {} - - AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) - : layout::AcquireNextImageInfoKHR( rhs ) - {} - - AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AcquireNextImageInfoKHR & setSwapchain( vk::SwapchainKHR swapchain_ ) - { - swapchain = swapchain_; - return *this; - } - - AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) - { - timeout = timeout_; - return *this; - } - - AcquireNextImageInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) - { - semaphore = semaphore_; - return *this; - } - - AcquireNextImageInfoKHR & setFence( vk::Fence fence_ ) - { - fence = fence_; - return *this; - } - - AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) - { - deviceMask = deviceMask_; - return *this; - } - - operator VkAcquireNextImageInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAcquireNextImageInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AcquireNextImageInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ) - && ( timeout == rhs.timeout ) - && ( semaphore == rhs.semaphore ) - && ( fence == rhs.fence ) - && ( deviceMask == rhs.deviceMask ); - } - - bool operator!=( AcquireNextImageInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AcquireNextImageInfoKHR::sType; - }; - static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct AllocationCallbacks - { - AllocationCallbacks( void* pUserData_ = nullptr, - PFN_vkAllocationFunction pfnAllocation_ = nullptr, - PFN_vkReallocationFunction pfnReallocation_ = nullptr, - PFN_vkFreeFunction pfnFree_ = nullptr, - PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, - PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr ) - : pUserData( pUserData_ ) - , pfnAllocation( pfnAllocation_ ) - , pfnReallocation( pfnReallocation_ ) - , pfnFree( pfnFree_ ) - , pfnInternalAllocation( pfnInternalAllocation_ ) - , pfnInternalFree( pfnInternalFree_ ) - {} - - AllocationCallbacks( VkAllocationCallbacks const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AllocationCallbacks & setPUserData( void* pUserData_ ) - { - pUserData = pUserData_; - return *this; - } - - AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) - { - pfnAllocation = pfnAllocation_; - return *this; - } - - AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) - { - pfnReallocation = pfnReallocation_; - return *this; - } - - AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) - { - pfnFree = pfnFree_; - return *this; - } - - AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) - { - pfnInternalAllocation = pfnInternalAllocation_; - return *this; - } - - AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) - { - pfnInternalFree = pfnInternalFree_; - return *this; - } - - operator VkAllocationCallbacks const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAllocationCallbacks &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AllocationCallbacks const& rhs ) const - { - return ( pUserData == rhs.pUserData ) - && ( pfnAllocation == rhs.pfnAllocation ) - && ( pfnReallocation == rhs.pfnReallocation ) - && ( pfnFree == rhs.pfnFree ) - && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) - && ( pfnInternalFree == rhs.pfnInternalFree ); - } - - bool operator!=( AllocationCallbacks const& rhs ) const - { - return !operator==( rhs ); - } - - public: - void* pUserData; - PFN_vkAllocationFunction pfnAllocation; - PFN_vkReallocationFunction pfnReallocation; - PFN_vkFreeFunction pfnFree; - PFN_vkInternalAllocationNotification pfnInternalAllocation; - PFN_vkInternalFreeNotification pfnInternalFree; - }; - static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ComponentMapping - { - ComponentMapping( vk::ComponentSwizzle r_ = vk::ComponentSwizzle::eIdentity, - vk::ComponentSwizzle g_ = vk::ComponentSwizzle::eIdentity, - vk::ComponentSwizzle b_ = vk::ComponentSwizzle::eIdentity, - vk::ComponentSwizzle a_ = vk::ComponentSwizzle::eIdentity ) - : r( r_ ) - , g( g_ ) - , b( b_ ) - , a( a_ ) - {} - - ComponentMapping( VkComponentMapping const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ComponentMapping& operator=( VkComponentMapping const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ComponentMapping & setR( vk::ComponentSwizzle r_ ) - { - r = r_; - return *this; - } - - ComponentMapping & setG( vk::ComponentSwizzle g_ ) - { - g = g_; - return *this; - } - - ComponentMapping & setB( vk::ComponentSwizzle b_ ) - { - b = b_; - return *this; - } - - ComponentMapping & setA( vk::ComponentSwizzle a_ ) - { - a = a_; - return *this; - } - - operator VkComponentMapping const&() const - { - return *reinterpret_cast( this ); - } - - operator VkComponentMapping &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ComponentMapping const& rhs ) const - { - return ( r == rhs.r ) - && ( g == rhs.g ) - && ( b == rhs.b ) - && ( a == rhs.a ); - } - - bool operator!=( ComponentMapping const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ComponentSwizzle r; - vk::ComponentSwizzle g; - vk::ComponentSwizzle b; - vk::ComponentSwizzle a; - }; - static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - namespace layout - { - struct AndroidHardwareBufferFormatPropertiesANDROID - { - protected: - AndroidHardwareBufferFormatPropertiesANDROID( vk::Format format_ = vk::Format::eUndefined, - uint64_t externalFormat_ = 0, - vk::FormatFeatureFlags formatFeatures_ = vk::FormatFeatureFlags(), - vk::ComponentMapping samplerYcbcrConversionComponents_ = vk::ComponentMapping(), - vk::SamplerYcbcrModelConversion suggestedYcbcrModel_ = vk::SamplerYcbcrModelConversion::eRgbIdentity, - vk::SamplerYcbcrRange suggestedYcbcrRange_ = vk::SamplerYcbcrRange::eItuFull, - vk::ChromaLocation suggestedXChromaOffset_ = vk::ChromaLocation::eCositedEven, - vk::ChromaLocation suggestedYChromaOffset_ = vk::ChromaLocation::eCositedEven ) - : format( format_ ) - , externalFormat( externalFormat_ ) - , formatFeatures( formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) - {} - - AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; - void* pNext = nullptr; - vk::Format format; - uint64_t externalFormat; - vk::FormatFeatureFlags formatFeatures; - vk::ComponentMapping samplerYcbcrConversionComponents; - vk::SamplerYcbcrModelConversion suggestedYcbcrModel; - vk::SamplerYcbcrRange suggestedYcbcrRange; - vk::ChromaLocation 
suggestedXChromaOffset; - vk::ChromaLocation suggestedYChromaOffset; - }; - static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "layout struct and wrapper have different size!" ); - } - - struct AndroidHardwareBufferFormatPropertiesANDROID : public layout::AndroidHardwareBufferFormatPropertiesANDROID - { - operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAndroidHardwareBufferFormatPropertiesANDROID &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( externalFormat == rhs.externalFormat ) - && ( formatFeatures == rhs.formatFeatures ) - && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) - && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) - && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) - && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) - && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); - } - - bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AndroidHardwareBufferFormatPropertiesANDROID::sType; - }; - static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - namespace layout - { - struct AndroidHardwareBufferPropertiesANDROID - { - protected: - AndroidHardwareBufferPropertiesANDROID( vk::DeviceSize allocationSize_ = 0, - uint32_t memoryTypeBits_ = 0 ) - : allocationSize( allocationSize_ ) - , memoryTypeBits( memoryTypeBits_ ) - {} - - AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; - void* pNext = nullptr; - vk::DeviceSize allocationSize; - uint32_t memoryTypeBits; - }; - static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "layout struct and wrapper have different size!" 
); - } - - struct AndroidHardwareBufferPropertiesANDROID : public layout::AndroidHardwareBufferPropertiesANDROID - { - operator VkAndroidHardwareBufferPropertiesANDROID const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAndroidHardwareBufferPropertiesANDROID &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AndroidHardwareBufferPropertiesANDROID::sType; - }; - static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - namespace layout - { - struct AndroidHardwareBufferUsageANDROID - { - protected: - AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = 0 ) - : androidHardwareBufferUsage( androidHardwareBufferUsage_ ) - {} - - AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; - void* pNext = nullptr; - uint64_t androidHardwareBufferUsage; - }; - static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "layout struct and wrapper have different size!" ); - } - - struct AndroidHardwareBufferUsageANDROID : public layout::AndroidHardwareBufferUsageANDROID - { - operator VkAndroidHardwareBufferUsageANDROID const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAndroidHardwareBufferUsageANDROID &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); - } - - bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AndroidHardwareBufferUsageANDROID::sType; - }; - static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - namespace layout - { - struct AndroidSurfaceCreateInfoKHR - { - protected: - AndroidSurfaceCreateInfoKHR( vk::AndroidSurfaceCreateFlagsKHR flags_ = vk::AndroidSurfaceCreateFlagsKHR(), - struct ANativeWindow* window_ = nullptr ) - : flags( flags_ ) - , window( window_ ) - {} - - AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; - const void* pNext = nullptr; - vk::AndroidSurfaceCreateFlagsKHR flags; - struct ANativeWindow* window; - }; - static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct AndroidSurfaceCreateInfoKHR : public layout::AndroidSurfaceCreateInfoKHR - { - AndroidSurfaceCreateInfoKHR( vk::AndroidSurfaceCreateFlagsKHR flags_ = vk::AndroidSurfaceCreateFlagsKHR(), - struct ANativeWindow* window_ = nullptr ) - : layout::AndroidSurfaceCreateInfoKHR( flags_, window_ ) - {} - - AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) - : layout::AndroidSurfaceCreateInfoKHR( rhs ) - {} - - AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AndroidSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AndroidSurfaceCreateInfoKHR & setFlags( vk::AndroidSurfaceCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) - { - window = window_; - return *this; - } - - operator VkAndroidSurfaceCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAndroidSurfaceCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( window == rhs.window ); - } - - bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AndroidSurfaceCreateInfoKHR::sType; - }; - static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - namespace layout - { - struct ApplicationInfo - { - protected: - ApplicationInfo( const char* pApplicationName_ = nullptr, - uint32_t applicationVersion_ = 0, - const char* pEngineName_ = nullptr, - uint32_t engineVersion_ = 0, - uint32_t apiVersion_ = 0 ) - : pApplicationName( pApplicationName_ ) - , applicationVersion( applicationVersion_ ) - , pEngineName( pEngineName_ ) - , engineVersion( engineVersion_ ) - , apiVersion( apiVersion_ ) - {} - - ApplicationInfo( VkApplicationInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ApplicationInfo& operator=( VkApplicationInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eApplicationInfo; - const void* pNext = nullptr; - const char* pApplicationName; - uint32_t applicationVersion; - const char* pEngineName; - uint32_t engineVersion; - uint32_t apiVersion; - }; - static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "layout struct and wrapper have different size!" ); - } - - struct ApplicationInfo : public layout::ApplicationInfo - { - ApplicationInfo( const char* pApplicationName_ = nullptr, - uint32_t applicationVersion_ = 0, - const char* pEngineName_ = nullptr, - uint32_t engineVersion_ = 0, - uint32_t apiVersion_ = 0 ) - : layout::ApplicationInfo( pApplicationName_, applicationVersion_, pEngineName_, engineVersion_, apiVersion_ ) - {} - - ApplicationInfo( VkApplicationInfo const & rhs ) - : layout::ApplicationInfo( rhs ) - {} - - ApplicationInfo& operator=( VkApplicationInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ApplicationInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) - { - pApplicationName = pApplicationName_; - return *this; - } - - ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) - { - applicationVersion = applicationVersion_; - return *this; - } - - ApplicationInfo & setPEngineName( const char* pEngineName_ ) - { - pEngineName = pEngineName_; - return *this; - } - - ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) - { - engineVersion = engineVersion_; - return *this; - } - - ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) - { - apiVersion = apiVersion_; - return *this; - } - - operator VkApplicationInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkApplicationInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ApplicationInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pApplicationName == rhs.pApplicationName ) - && ( applicationVersion == rhs.applicationVersion ) - && ( pEngineName == rhs.pEngineName ) - && ( engineVersion == rhs.engineVersion ) - && ( apiVersion == rhs.apiVersion ); - } - - bool operator!=( ApplicationInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ApplicationInfo::sType; - }; - static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct AttachmentDescription - { - AttachmentDescription( vk::AttachmentDescriptionFlags flags_ = vk::AttachmentDescriptionFlags(), - vk::Format format_ = vk::Format::eUndefined, - vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1, - vk::AttachmentLoadOp loadOp_ = vk::AttachmentLoadOp::eLoad, - vk::AttachmentStoreOp storeOp_ = vk::AttachmentStoreOp::eStore, - vk::AttachmentLoadOp stencilLoadOp_ = vk::AttachmentLoadOp::eLoad, - vk::AttachmentStoreOp stencilStoreOp_ = vk::AttachmentStoreOp::eStore, - vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined, - vk::ImageLayout finalLayout_ = vk::ImageLayout::eUndefined ) - : flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) - {} - - AttachmentDescription( VkAttachmentDescription const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AttachmentDescription & setFlags( vk::AttachmentDescriptionFlags flags_ ) - { - flags = flags_; - return *this; - } - - AttachmentDescription & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - AttachmentDescription & setSamples( vk::SampleCountFlagBits samples_ ) - { - samples = samples_; - return *this; - } - - AttachmentDescription & setLoadOp( vk::AttachmentLoadOp loadOp_ ) - { - loadOp = loadOp_; - return *this; - } - - AttachmentDescription & setStoreOp( vk::AttachmentStoreOp storeOp_ ) - { - storeOp = storeOp_; - return *this; - } - - AttachmentDescription & setStencilLoadOp( vk::AttachmentLoadOp stencilLoadOp_ ) - { - stencilLoadOp = stencilLoadOp_; - return *this; - } - - AttachmentDescription & setStencilStoreOp( vk::AttachmentStoreOp stencilStoreOp_ ) - { - stencilStoreOp = stencilStoreOp_; - return *this; - } - - AttachmentDescription & setInitialLayout( vk::ImageLayout initialLayout_ ) - { - initialLayout = initialLayout_; - return *this; - } - - AttachmentDescription & setFinalLayout( vk::ImageLayout finalLayout_ ) - { - finalLayout = finalLayout_; - return *this; - } - - operator VkAttachmentDescription const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAttachmentDescription &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AttachmentDescription const& rhs ) const - { - return ( flags == rhs.flags ) - && ( format == rhs.format ) - && ( samples == rhs.samples ) - && ( loadOp == rhs.loadOp ) - && ( storeOp == rhs.storeOp ) - && ( stencilLoadOp == rhs.stencilLoadOp ) - && ( stencilStoreOp == rhs.stencilStoreOp ) - && ( initialLayout == rhs.initialLayout ) - && ( finalLayout == rhs.finalLayout ); - } - - bool operator!=( AttachmentDescription const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::AttachmentDescriptionFlags flags; - vk::Format format; - vk::SampleCountFlagBits samples; - vk::AttachmentLoadOp loadOp; - vk::AttachmentStoreOp storeOp; - vk::AttachmentLoadOp stencilLoadOp; - vk::AttachmentStoreOp stencilStoreOp; - vk::ImageLayout initialLayout; - vk::ImageLayout finalLayout; - }; - static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct AttachmentDescription2KHR - { - protected: - AttachmentDescription2KHR( vk::AttachmentDescriptionFlags flags_ = vk::AttachmentDescriptionFlags(), - vk::Format format_ = vk::Format::eUndefined, - vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1, - vk::AttachmentLoadOp loadOp_ = vk::AttachmentLoadOp::eLoad, - vk::AttachmentStoreOp storeOp_ = vk::AttachmentStoreOp::eStore, - vk::AttachmentLoadOp stencilLoadOp_ = vk::AttachmentLoadOp::eLoad, - vk::AttachmentStoreOp stencilStoreOp_ = vk::AttachmentStoreOp::eStore, - vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined, - vk::ImageLayout finalLayout_ = vk::ImageLayout::eUndefined ) - : flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) - {} - - AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAttachmentDescription2KHR; - const void* pNext = nullptr; - vk::AttachmentDescriptionFlags flags; - vk::Format format; - vk::SampleCountFlagBits samples; - vk::AttachmentLoadOp loadOp; - vk::AttachmentStoreOp storeOp; - vk::AttachmentLoadOp stencilLoadOp; - vk::AttachmentStoreOp stencilStoreOp; - vk::ImageLayout initialLayout; - vk::ImageLayout finalLayout; - }; - static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "layout struct and wrapper have different size!" ); - } - - struct AttachmentDescription2KHR : public layout::AttachmentDescription2KHR - { - AttachmentDescription2KHR( vk::AttachmentDescriptionFlags flags_ = vk::AttachmentDescriptionFlags(), - vk::Format format_ = vk::Format::eUndefined, - vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1, - vk::AttachmentLoadOp loadOp_ = vk::AttachmentLoadOp::eLoad, - vk::AttachmentStoreOp storeOp_ = vk::AttachmentStoreOp::eStore, - vk::AttachmentLoadOp stencilLoadOp_ = vk::AttachmentLoadOp::eLoad, - vk::AttachmentStoreOp stencilStoreOp_ = vk::AttachmentStoreOp::eStore, - vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined, - vk::ImageLayout finalLayout_ = vk::ImageLayout::eUndefined ) - : layout::AttachmentDescription2KHR( flags_, format_, samples_, loadOp_, storeOp_, stencilLoadOp_, stencilStoreOp_, initialLayout_, finalLayout_ ) - {} - - AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) - : layout::AttachmentDescription2KHR( rhs ) - {} - - AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AttachmentDescription2KHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AttachmentDescription2KHR & setFlags( vk::AttachmentDescriptionFlags flags_ ) - { - flags = flags_; - return *this; - } - - AttachmentDescription2KHR & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - AttachmentDescription2KHR & setSamples( vk::SampleCountFlagBits samples_ ) - { - samples = samples_; - return *this; - } - - AttachmentDescription2KHR & setLoadOp( vk::AttachmentLoadOp loadOp_ ) - { - loadOp = loadOp_; - return *this; - } - - AttachmentDescription2KHR & setStoreOp( vk::AttachmentStoreOp storeOp_ ) - { 
- storeOp = storeOp_; - return *this; - } - - AttachmentDescription2KHR & setStencilLoadOp( vk::AttachmentLoadOp stencilLoadOp_ ) - { - stencilLoadOp = stencilLoadOp_; - return *this; - } - - AttachmentDescription2KHR & setStencilStoreOp( vk::AttachmentStoreOp stencilStoreOp_ ) - { - stencilStoreOp = stencilStoreOp_; - return *this; - } - - AttachmentDescription2KHR & setInitialLayout( vk::ImageLayout initialLayout_ ) - { - initialLayout = initialLayout_; - return *this; - } - - AttachmentDescription2KHR & setFinalLayout( vk::ImageLayout finalLayout_ ) - { - finalLayout = finalLayout_; - return *this; - } - - operator VkAttachmentDescription2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAttachmentDescription2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AttachmentDescription2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( format == rhs.format ) - && ( samples == rhs.samples ) - && ( loadOp == rhs.loadOp ) - && ( storeOp == rhs.storeOp ) - && ( stencilLoadOp == rhs.stencilLoadOp ) - && ( stencilStoreOp == rhs.stencilStoreOp ) - && ( initialLayout == rhs.initialLayout ) - && ( finalLayout == rhs.finalLayout ); - } - - bool operator!=( AttachmentDescription2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AttachmentDescription2KHR::sType; - }; - static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct AttachmentReference - { - AttachmentReference( uint32_t attachment_ = 0, - vk::ImageLayout layout_ = vk::ImageLayout::eUndefined ) - : attachment( attachment_ ) - , layout( layout_ ) - {} - - AttachmentReference( VkAttachmentReference const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AttachmentReference& operator=( VkAttachmentReference const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AttachmentReference & setAttachment( uint32_t attachment_ ) - { - attachment = attachment_; - return *this; - } - - AttachmentReference & setLayout( vk::ImageLayout layout_ ) - { - layout = layout_; - return *this; - } - - operator VkAttachmentReference const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAttachmentReference &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AttachmentReference const& rhs ) const - { - return ( attachment == rhs.attachment ) - && ( layout == rhs.layout ); - } - - bool operator!=( AttachmentReference const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t attachment; - vk::ImageLayout layout; - }; - static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct AttachmentReference2KHR - { - protected: - AttachmentReference2KHR( uint32_t attachment_ = 0, - vk::ImageLayout layout_ = vk::ImageLayout::eUndefined, - vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags() ) - : attachment( attachment_ ) - , layout( layout_ ) - , aspectMask( aspectMask_ ) - {} - - AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eAttachmentReference2KHR; - const void* pNext = nullptr; - uint32_t attachment; - vk::ImageLayout layout; - vk::ImageAspectFlags aspectMask; - }; - static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "layout struct and wrapper have different size!" ); - } - - struct AttachmentReference2KHR : public layout::AttachmentReference2KHR - { - AttachmentReference2KHR( uint32_t attachment_ = 0, - vk::ImageLayout layout_ = vk::ImageLayout::eUndefined, - vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags() ) - : layout::AttachmentReference2KHR( attachment_, layout_, aspectMask_ ) - {} - - AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) - : layout::AttachmentReference2KHR( rhs ) - {} - - AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AttachmentReference2KHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - AttachmentReference2KHR & setAttachment( uint32_t attachment_ ) - { - attachment = attachment_; - return *this; - } - - AttachmentReference2KHR & setLayout( vk::ImageLayout layout_ ) - { - layout = layout_; - return *this; - } - - AttachmentReference2KHR & setAspectMask( vk::ImageAspectFlags aspectMask_ ) - { - aspectMask = aspectMask_; - return *this; - } - - operator VkAttachmentReference2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAttachmentReference2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AttachmentReference2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachment == rhs.attachment ) - && ( layout == rhs.layout ) - && ( aspectMask == rhs.aspectMask ); - } - - bool operator!=( AttachmentReference2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::AttachmentReference2KHR::sType; - }; - static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct Extent2D - { - Extent2D( uint32_t width_ = 0, - uint32_t height_ = 0 ) - : width( width_ ) - , height( height_ ) - {} - - Extent2D( VkExtent2D const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Extent2D& operator=( VkExtent2D const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Extent2D & setWidth( uint32_t width_ ) - { - width = width_; - return *this; - } - - Extent2D & setHeight( uint32_t height_ ) - { - height = height_; - return *this; - } - - operator VkExtent2D const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExtent2D &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Extent2D const& rhs ) const - { - return ( width == rhs.width ) - && ( height == rhs.height ); - } - - bool operator!=( Extent2D const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t width; - uint32_t height; - }; - static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SampleLocationEXT - { - SampleLocationEXT( float x_ = 0, - float y_ = 0 ) - : x( x_ ) - , y( y_ ) - {} - - SampleLocationEXT( VkSampleLocationEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SampleLocationEXT & setX( float x_ ) - { - x = x_; - return *this; - } - - SampleLocationEXT & setY( float y_ ) - { - y = y_; - return *this; - } - - operator VkSampleLocationEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSampleLocationEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SampleLocationEXT const& rhs ) const - { - return ( x == rhs.x ) - && ( y == rhs.y ); - } - - bool operator!=( SampleLocationEXT const& rhs ) const - { - return !operator==( rhs ); - } - - public: - float x; - float y; - }; - static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SampleLocationsInfoEXT - { - protected: - SampleLocationsInfoEXT( vk::SampleCountFlagBits sampleLocationsPerPixel_ = vk::SampleCountFlagBits::e1, - vk::Extent2D sampleLocationGridSize_ = vk::Extent2D(), - uint32_t sampleLocationsCount_ = 0, - const vk::SampleLocationEXT* pSampleLocations_ = nullptr ) - : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) - , sampleLocationGridSize( sampleLocationGridSize_ ) - , sampleLocationsCount( sampleLocationsCount_ ) - , pSampleLocations( pSampleLocations_ ) - {} - - SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSampleLocationsInfoEXT; - const void* pNext = nullptr; - vk::SampleCountFlagBits sampleLocationsPerPixel; - vk::Extent2D sampleLocationGridSize; - uint32_t sampleLocationsCount; - const vk::SampleLocationEXT* pSampleLocations; - }; - static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct SampleLocationsInfoEXT : public layout::SampleLocationsInfoEXT - { - SampleLocationsInfoEXT( vk::SampleCountFlagBits sampleLocationsPerPixel_ = vk::SampleCountFlagBits::e1, - vk::Extent2D sampleLocationGridSize_ = vk::Extent2D(), - uint32_t sampleLocationsCount_ = 0, - const vk::SampleLocationEXT* pSampleLocations_ = nullptr ) - : layout::SampleLocationsInfoEXT( sampleLocationsPerPixel_, sampleLocationGridSize_, sampleLocationsCount_, pSampleLocations_ ) - {} - - SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) - : layout::SampleLocationsInfoEXT( rhs ) - {} - - SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SampleLocationsInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SampleLocationsInfoEXT & setSampleLocationsPerPixel( vk::SampleCountFlagBits sampleLocationsPerPixel_ ) - { - sampleLocationsPerPixel = sampleLocationsPerPixel_; - return *this; - } - - SampleLocationsInfoEXT & setSampleLocationGridSize( vk::Extent2D sampleLocationGridSize_ ) - { - sampleLocationGridSize = sampleLocationGridSize_; - return *this; - } - - SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) - { - sampleLocationsCount = sampleLocationsCount_; - return *this; - } - - SampleLocationsInfoEXT & setPSampleLocations( const vk::SampleLocationEXT* pSampleLocations_ ) - { - pSampleLocations = pSampleLocations_; - return *this; - } - - operator VkSampleLocationsInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSampleLocationsInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SampleLocationsInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) - && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) - && ( sampleLocationsCount == rhs.sampleLocationsCount ) - && ( pSampleLocations == rhs.pSampleLocations ); - } - - bool operator!=( SampleLocationsInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SampleLocationsInfoEXT::sType; - }; - static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct AttachmentSampleLocationsEXT - { - AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0, - vk::SampleLocationsInfoEXT sampleLocationsInfo_ = vk::SampleLocationsInfoEXT() ) - : attachmentIndex( attachmentIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) - {} - - AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) - { - attachmentIndex = attachmentIndex_; - return *this; - } - - AttachmentSampleLocationsEXT & setSampleLocationsInfo( vk::SampleLocationsInfoEXT sampleLocationsInfo_ ) - { - sampleLocationsInfo = sampleLocationsInfo_; - return *this; - } - - operator VkAttachmentSampleLocationsEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkAttachmentSampleLocationsEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( AttachmentSampleLocationsEXT const& rhs ) const - { - return ( attachmentIndex == rhs.attachmentIndex ) - && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); - } - - bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t attachmentIndex; - vk::SampleLocationsInfoEXT sampleLocationsInfo; - }; - static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BaseInStructure - { - protected: - BaseInStructure() - - {} - - BaseInStructure( VkBaseInStructure const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BaseInStructure& operator=( VkBaseInStructure const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType; - const struct vk::BaseInStructure* pNext = nullptr; - }; - static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "layout struct and wrapper have different size!" ); - } - - struct BaseInStructure : public layout::BaseInStructure - { - BaseInStructure() - - : layout::BaseInStructure( ) - {} - - BaseInStructure( VkBaseInStructure const & rhs ) - : layout::BaseInStructure( rhs ) - {} - - BaseInStructure& operator=( VkBaseInStructure const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BaseInStructure & setPNext( const struct vk::BaseInStructure* pNext_ ) - { - pNext = pNext_; - return *this; - } - - operator VkBaseInStructure const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBaseInStructure &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BaseInStructure const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); - } - - bool operator!=( BaseInStructure const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BaseInStructure::sType; - }; - static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct BaseOutStructure - { - protected: - BaseOutStructure() - - {} - - BaseOutStructure( VkBaseOutStructure const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType; - struct vk::BaseOutStructure* pNext = nullptr; - }; - static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "layout struct and wrapper have different size!" ); - } - - struct BaseOutStructure : public layout::BaseOutStructure - { - BaseOutStructure() - - : layout::BaseOutStructure( ) - {} - - BaseOutStructure( VkBaseOutStructure const & rhs ) - : layout::BaseOutStructure( rhs ) - {} - - BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BaseOutStructure & setPNext( struct vk::BaseOutStructure* pNext_ ) - { - pNext = pNext_; - return *this; - } - - operator VkBaseOutStructure const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBaseOutStructure &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BaseOutStructure const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); - } - - bool operator!=( BaseOutStructure const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BaseOutStructure::sType; - }; - static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BindAccelerationStructureMemoryInfoNV - { - protected: - BindAccelerationStructureMemoryInfoNV( vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0, - uint32_t deviceIndexCount_ = 0, - const uint32_t* pDeviceIndices_ = nullptr ) - : accelerationStructure( accelerationStructure_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) - {} - - BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; - const void* pNext = nullptr; - vk::AccelerationStructureNV accelerationStructure; - vk::DeviceMemory memory; - vk::DeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; - }; - static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct BindAccelerationStructureMemoryInfoNV : public layout::BindAccelerationStructureMemoryInfoNV - { - BindAccelerationStructureMemoryInfoNV( vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0, - uint32_t deviceIndexCount_ = 0, - const uint32_t* pDeviceIndices_ = nullptr ) - : layout::BindAccelerationStructureMemoryInfoNV( accelerationStructure_, memory_, memoryOffset_, deviceIndexCount_, pDeviceIndices_ ) - {} - - BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) - : layout::BindAccelerationStructureMemoryInfoNV( rhs ) - {} - - BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( vk::AccelerationStructureNV accelerationStructure_ ) - { - accelerationStructure = accelerationStructure_; - return *this; - } - - BindAccelerationStructureMemoryInfoNV & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - BindAccelerationStructureMemoryInfoNV & setMemoryOffset( vk::DeviceSize memoryOffset_ ) - { - memoryOffset = memoryOffset_; - return *this; - } - - BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) - { - deviceIndexCount = deviceIndexCount_; - return *this; - } - - BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) - { - pDeviceIndices = pDeviceIndices_; - return *this; - } - - operator VkBindAccelerationStructureMemoryInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindAccelerationStructureMemoryInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( accelerationStructure == rhs.accelerationStructure ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ); - } - - bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindAccelerationStructureMemoryInfoNV::sType; - }; - static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct BindBufferMemoryDeviceGroupInfo - { - protected: - BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, - const uint32_t* pDeviceIndices_ = nullptr ) - : deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) - {} - - BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; - const void* pNext = nullptr; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; - }; - static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "layout struct and wrapper have different size!" ); - } - - struct BindBufferMemoryDeviceGroupInfo : public layout::BindBufferMemoryDeviceGroupInfo - { - BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, - const uint32_t* pDeviceIndices_ = nullptr ) - : layout::BindBufferMemoryDeviceGroupInfo( deviceIndexCount_, pDeviceIndices_ ) - {} - - BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) - : layout::BindBufferMemoryDeviceGroupInfo( rhs ) - {} - - BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) - { - deviceIndexCount = deviceIndexCount_; - return *this; - } - - BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) - { - pDeviceIndices = pDeviceIndices_; - return *this; - } - - operator VkBindBufferMemoryDeviceGroupInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindBufferMemoryDeviceGroupInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ); - } - - bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindBufferMemoryDeviceGroupInfo::sType; - }; - static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct BindBufferMemoryInfo - { - protected: - BindBufferMemoryInfo( vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0 ) - : buffer( buffer_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - {} - - BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindBufferMemoryInfo; - const void* pNext = nullptr; - vk::Buffer buffer; - vk::DeviceMemory memory; - vk::DeviceSize memoryOffset; - }; - static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "layout struct and wrapper have different size!" ); - } - - struct BindBufferMemoryInfo : public layout::BindBufferMemoryInfo - { - BindBufferMemoryInfo( vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0 ) - : layout::BindBufferMemoryInfo( buffer_, memory_, memoryOffset_ ) - {} - - BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) - : layout::BindBufferMemoryInfo( rhs ) - {} - - BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindBufferMemoryInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindBufferMemoryInfo & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - BindBufferMemoryInfo & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - BindBufferMemoryInfo & setMemoryOffset( vk::DeviceSize memoryOffset_ ) - { - memoryOffset = memoryOffset_; - return *this; - } - - operator VkBindBufferMemoryInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindBufferMemoryInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindBufferMemoryInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ); - } - - bool operator!=( BindBufferMemoryInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindBufferMemoryInfo::sType; - }; - static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct Offset2D - { - Offset2D( int32_t x_ = 0, - int32_t y_ = 0 ) - : x( x_ ) - , y( y_ ) - {} - - Offset2D( VkOffset2D const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Offset2D& operator=( VkOffset2D const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Offset2D & setX( int32_t x_ ) - { - x = x_; - return *this; - } - - Offset2D & setY( int32_t y_ ) - { - y = y_; - return *this; - } - - operator VkOffset2D const&() const - { - return *reinterpret_cast( this ); - } - - operator VkOffset2D &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Offset2D const& rhs ) const - { - return ( x == rhs.x ) - && ( y == rhs.y ); - } - - bool operator!=( Offset2D const& rhs ) const - { - return !operator==( rhs ); - } - - public: - int32_t x; - int32_t y; - }; - static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct Rect2D - { - Rect2D( vk::Offset2D offset_ = vk::Offset2D(), - vk::Extent2D extent_ = vk::Extent2D() ) - : offset( offset_ ) - , extent( extent_ ) - {} - - Rect2D( VkRect2D const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Rect2D& operator=( VkRect2D const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Rect2D & setOffset( vk::Offset2D offset_ ) - { - offset = offset_; - return *this; - } - - Rect2D & setExtent( vk::Extent2D extent_ ) - { - extent = extent_; - return *this; - } - - operator VkRect2D const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRect2D &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Rect2D const& rhs ) const - { - return ( offset == rhs.offset ) - && ( extent == rhs.extent ); - } - - bool operator!=( Rect2D const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Offset2D offset; - vk::Extent2D extent; - }; - static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BindImageMemoryDeviceGroupInfo - { - protected: - BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, - const uint32_t* pDeviceIndices_ = nullptr, - uint32_t splitInstanceBindRegionCount_ = 0, - const vk::Rect2D* pSplitInstanceBindRegions_ = nullptr ) - : deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) - , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) - , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) - {} - - BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; - const void* pNext = nullptr; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; - uint32_t splitInstanceBindRegionCount; - const vk::Rect2D* pSplitInstanceBindRegions; - }; - static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "layout struct and wrapper have different size!" 
); - } - - struct BindImageMemoryDeviceGroupInfo : public layout::BindImageMemoryDeviceGroupInfo - { - BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, - const uint32_t* pDeviceIndices_ = nullptr, - uint32_t splitInstanceBindRegionCount_ = 0, - const vk::Rect2D* pSplitInstanceBindRegions_ = nullptr ) - : layout::BindImageMemoryDeviceGroupInfo( deviceIndexCount_, pDeviceIndices_, splitInstanceBindRegionCount_, pSplitInstanceBindRegions_ ) - {} - - BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) - : layout::BindImageMemoryDeviceGroupInfo( rhs ) - {} - - BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) - { - deviceIndexCount = deviceIndexCount_; - return *this; - } - - BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) - { - pDeviceIndices = pDeviceIndices_; - return *this; - } - - BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) - { - splitInstanceBindRegionCount = splitInstanceBindRegionCount_; - return *this; - } - - BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const vk::Rect2D* pSplitInstanceBindRegions_ ) - { - pSplitInstanceBindRegions = pSplitInstanceBindRegions_; - return *this; - } - - operator VkBindImageMemoryDeviceGroupInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindImageMemoryDeviceGroupInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceIndexCount == rhs.deviceIndexCount ) - && ( pDeviceIndices == rhs.pDeviceIndices ) - && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) - && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); - } - - bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindImageMemoryDeviceGroupInfo::sType; - }; - static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BindImageMemoryInfo - { - protected: - BindImageMemoryInfo( vk::Image image_ = vk::Image(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0 ) - : image( image_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - {} - - BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindImageMemoryInfo; - const void* pNext = nullptr; - vk::Image image; - vk::DeviceMemory memory; - vk::DeviceSize memoryOffset; - }; - static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "layout struct and wrapper have different size!" 
); - } - - struct BindImageMemoryInfo : public layout::BindImageMemoryInfo - { - BindImageMemoryInfo( vk::Image image_ = vk::Image(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0 ) - : layout::BindImageMemoryInfo( image_, memory_, memoryOffset_ ) - {} - - BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) - : layout::BindImageMemoryInfo( rhs ) - {} - - BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindImageMemoryInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindImageMemoryInfo & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - BindImageMemoryInfo & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - BindImageMemoryInfo & setMemoryOffset( vk::DeviceSize memoryOffset_ ) - { - memoryOffset = memoryOffset_; - return *this; - } - - operator VkBindImageMemoryInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindImageMemoryInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindImageMemoryInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ); - } - - bool operator!=( BindImageMemoryInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindImageMemoryInfo::sType; - }; - static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BindImageMemorySwapchainInfoKHR - { - protected: - BindImageMemorySwapchainInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(), - uint32_t imageIndex_ = 0 ) - : swapchain( swapchain_ ) - , imageIndex( imageIndex_ ) - {} - - BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; - const void* pNext = nullptr; - vk::SwapchainKHR swapchain; - uint32_t imageIndex; - }; - static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct BindImageMemorySwapchainInfoKHR : public layout::BindImageMemorySwapchainInfoKHR - { - BindImageMemorySwapchainInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(), - uint32_t imageIndex_ = 0 ) - : layout::BindImageMemorySwapchainInfoKHR( swapchain_, imageIndex_ ) - {} - - BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) - : layout::BindImageMemorySwapchainInfoKHR( rhs ) - {} - - BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindImageMemorySwapchainInfoKHR & setSwapchain( vk::SwapchainKHR swapchain_ ) - { - swapchain = swapchain_; - return *this; - } - - BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) - { - imageIndex = imageIndex_; - return *this; - } - - operator VkBindImageMemorySwapchainInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindImageMemorySwapchainInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ) - && ( imageIndex == rhs.imageIndex ); - } - - bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindImageMemorySwapchainInfoKHR::sType; - }; - static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BindImagePlaneMemoryInfo - { - protected: - BindImagePlaneMemoryInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) - : planeAspect( planeAspect_ ) - {} - - BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; - const void* pNext = nullptr; - vk::ImageAspectFlagBits planeAspect; - }; - static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "layout struct and wrapper have different size!" 
); - } - - struct BindImagePlaneMemoryInfo : public layout::BindImagePlaneMemoryInfo - { - BindImagePlaneMemoryInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) - : layout::BindImagePlaneMemoryInfo( planeAspect_ ) - {} - - BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) - : layout::BindImagePlaneMemoryInfo( rhs ) - {} - - BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindImagePlaneMemoryInfo & setPlaneAspect( vk::ImageAspectFlagBits planeAspect_ ) - { - planeAspect = planeAspect_; - return *this; - } - - operator VkBindImagePlaneMemoryInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindImagePlaneMemoryInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindImagePlaneMemoryInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); - } - - bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindImagePlaneMemoryInfo::sType; - }; - static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseMemoryBind - { - SparseMemoryBind( vk::DeviceSize resourceOffset_ = 0, - vk::DeviceSize size_ = 0, - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0, - vk::SparseMemoryBindFlags flags_ = vk::SparseMemoryBindFlags() ) - : resourceOffset( resourceOffset_ ) - , size( size_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) - {} - - SparseMemoryBind( VkSparseMemoryBind const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SparseMemoryBind & setResourceOffset( vk::DeviceSize resourceOffset_ ) - { - resourceOffset = resourceOffset_; - return *this; - } - - SparseMemoryBind & setSize( vk::DeviceSize size_ ) - { - size = size_; - return *this; - } - - SparseMemoryBind & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - SparseMemoryBind & setMemoryOffset( vk::DeviceSize memoryOffset_ ) - { - memoryOffset = memoryOffset_; - return *this; - } - - SparseMemoryBind & setFlags( vk::SparseMemoryBindFlags flags_ ) - { - flags = flags_; - return *this; - } - - operator VkSparseMemoryBind const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseMemoryBind &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseMemoryBind const& rhs ) const - { - return ( resourceOffset == rhs.resourceOffset ) - && ( size == rhs.size ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( flags == rhs.flags ); - } - - bool operator!=( SparseMemoryBind const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DeviceSize resourceOffset; - vk::DeviceSize size; - vk::DeviceMemory memory; - vk::DeviceSize memoryOffset; - vk::SparseMemoryBindFlags flags; - }; - static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseBufferMemoryBindInfo - { - SparseBufferMemoryBindInfo( vk::Buffer buffer_ = vk::Buffer(), - uint32_t bindCount_ = 0, - const vk::SparseMemoryBind* pBinds_ = nullptr ) - : buffer( buffer_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) - {} - - SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SparseBufferMemoryBindInfo & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) - { - bindCount = bindCount_; - return *this; - } - - SparseBufferMemoryBindInfo & setPBinds( const vk::SparseMemoryBind* pBinds_ ) - { - pBinds = pBinds_; - return *this; - } - - operator VkSparseBufferMemoryBindInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseBufferMemoryBindInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseBufferMemoryBindInfo const& rhs ) const - { - return ( buffer == rhs.buffer ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); - } - - bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Buffer buffer; - uint32_t bindCount; - const vk::SparseMemoryBind* pBinds; - }; - static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseImageOpaqueMemoryBindInfo - { - SparseImageOpaqueMemoryBindInfo( vk::Image image_ = vk::Image(), - uint32_t bindCount_ = 0, - const vk::SparseMemoryBind* pBinds_ = nullptr ) - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) - {} - - SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SparseImageOpaqueMemoryBindInfo & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) - { - bindCount = bindCount_; - return *this; - } - - SparseImageOpaqueMemoryBindInfo & setPBinds( const vk::SparseMemoryBind* pBinds_ ) - { - pBinds = pBinds_; - return *this; - } - - operator VkSparseImageOpaqueMemoryBindInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageOpaqueMemoryBindInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const - { - return ( image == rhs.image ) - && ( bindCount == rhs.bindCount ) - && ( pBinds == rhs.pBinds ); - } - - bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Image image; - uint32_t bindCount; - const vk::SparseMemoryBind* pBinds; - }; - static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageSubresource - { - ImageSubresource( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(), - uint32_t mipLevel_ = 0, - uint32_t arrayLayer_ = 0 ) - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , arrayLayer( arrayLayer_ ) - {} - - ImageSubresource( VkImageSubresource const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageSubresource& operator=( VkImageSubresource const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageSubresource & setAspectMask( vk::ImageAspectFlags aspectMask_ ) - { - aspectMask = aspectMask_; - return *this; - } - - ImageSubresource & setMipLevel( uint32_t mipLevel_ ) - { - mipLevel = mipLevel_; - return *this; - } - - ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) - { - arrayLayer = arrayLayer_; - return *this; - } - - operator VkImageSubresource const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresource &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSubresource const& rhs ) const - { - return ( aspectMask == rhs.aspectMask ) - && ( mipLevel == rhs.mipLevel ) - && ( arrayLayer == rhs.arrayLayer ); - } - - bool operator!=( ImageSubresource const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t arrayLayer; - }; - static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct Offset3D - { - Offset3D( int32_t x_ = 0, - int32_t y_ = 0, - int32_t z_ = 0 ) - : x( x_ ) - , y( y_ ) - , z( z_ ) - {} - - explicit Offset3D( Offset2D const& offset2D, - int32_t z_ = 0 ) - : x( offset2D.x ) - , y( offset2D.y ) - , z( z_ ) - - {} - - Offset3D( VkOffset3D const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Offset3D& operator=( VkOffset3D const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Offset3D & setX( int32_t x_ ) - { - x = x_; - return *this; - } - - Offset3D & setY( int32_t y_ ) - { - y = y_; - return *this; - } - - Offset3D & setZ( int32_t z_ ) - { - z = z_; - return *this; - } - - operator VkOffset3D const&() const - { - return *reinterpret_cast( this ); - } - - operator VkOffset3D &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Offset3D const& rhs ) const - { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( z == rhs.z ); - } - - bool operator!=( Offset3D const& rhs ) const - { - return !operator==( rhs ); - } - - public: - int32_t x; - int32_t y; - int32_t z; - }; - static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct Extent3D - { - Extent3D( uint32_t width_ = 0, - uint32_t height_ = 0, - uint32_t depth_ = 0 ) - : width( width_ ) - , height( height_ ) - , depth( depth_ ) - {} - - explicit Extent3D( Extent2D const& extent2D, - uint32_t depth_ = 0 ) - : width( extent2D.width ) - , height( extent2D.height ) - , depth( depth_ ) - - {} - - Extent3D( VkExtent3D const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Extent3D& operator=( VkExtent3D const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Extent3D & setWidth( uint32_t width_ ) - { - width = width_; - return *this; - } - - Extent3D & setHeight( uint32_t height_ ) - { - height = height_; - return *this; - } - - Extent3D & setDepth( uint32_t depth_ ) - { - depth = depth_; - return *this; - } - - operator VkExtent3D const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExtent3D &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Extent3D const& rhs ) const - { - return ( width == rhs.width ) - && ( height == rhs.height ) - && ( depth == rhs.depth ); - } - - bool operator!=( Extent3D const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t width; - uint32_t height; - uint32_t depth; - }; - static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseImageMemoryBind - { - SparseImageMemoryBind( vk::ImageSubresource subresource_ = vk::ImageSubresource(), - vk::Offset3D offset_ = vk::Offset3D(), - vk::Extent3D extent_ = vk::Extent3D(), - vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize memoryOffset_ = 0, - vk::SparseMemoryBindFlags flags_ = vk::SparseMemoryBindFlags() ) - : subresource( subresource_ ) - , offset( offset_ ) - , extent( extent_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) - {} - - SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SparseImageMemoryBind & setSubresource( vk::ImageSubresource subresource_ ) - { - subresource = subresource_; - return *this; - } - - SparseImageMemoryBind & setOffset( vk::Offset3D offset_ ) - { - offset = offset_; - return *this; - } - - SparseImageMemoryBind & setExtent( vk::Extent3D extent_ ) - { - extent = extent_; - return *this; - } - - SparseImageMemoryBind & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - SparseImageMemoryBind & setMemoryOffset( vk::DeviceSize memoryOffset_ ) - { - memoryOffset = memoryOffset_; - return *this; - } - - SparseImageMemoryBind & setFlags( vk::SparseMemoryBindFlags flags_ ) - { - flags = flags_; - return *this; - } - - operator VkSparseImageMemoryBind const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageMemoryBind &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageMemoryBind const& rhs ) const - { - return ( subresource == rhs.subresource ) - && ( offset == rhs.offset ) - && ( extent == rhs.extent ) - && ( memory == rhs.memory ) - && ( memoryOffset == rhs.memoryOffset ) - && ( flags == rhs.flags ); - } - - bool operator!=( SparseImageMemoryBind const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ImageSubresource subresource; - vk::Offset3D offset; - vk::Extent3D extent; - 
-    vk::DeviceMemory memory;
-    vk::DeviceSize memoryOffset;
-    vk::SparseMemoryBindFlags flags;
-  };
-  static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
-
-  struct SparseImageMemoryBindInfo
-  {
-    SparseImageMemoryBindInfo( vk::Image image_ = vk::Image(),
-                               uint32_t bindCount_ = 0,
-                               const vk::SparseImageMemoryBind* pBinds_ = nullptr )
-      : image( image_ )
-      , bindCount( bindCount_ )
-      , pBinds( pBinds_ )
-    {}
-
-    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
-    {
-      *reinterpret_cast<VkSparseImageMemoryBindInfo*>(this) = rhs;
-    }
-
-    SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
-    {
-      *reinterpret_cast<VkSparseImageMemoryBindInfo*>(this) = rhs;
-      return *this;
-    }
-
-    SparseImageMemoryBindInfo & setImage( vk::Image image_ )
-    {
-      image = image_;
-      return *this;
-    }
-
-    SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ )
-    {
-      bindCount = bindCount_;
-      return *this;
-    }
-
-    SparseImageMemoryBindInfo & setPBinds( const vk::SparseImageMemoryBind* pBinds_ )
-    {
-      pBinds = pBinds_;
-      return *this;
-    }
-
-    operator VkSparseImageMemoryBindInfo const&() const
-    {
-      return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
-    }
-
-    operator VkSparseImageMemoryBindInfo &()
-    {
-      return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
-    }
-
-    bool operator==( SparseImageMemoryBindInfo const& rhs ) const
-    {
-      return ( image == rhs.image )
-          && ( bindCount == rhs.bindCount )
-          && ( pBinds == rhs.pBinds );
-    }
-
-    bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::Image image;
-    uint32_t bindCount;
-    const vk::SparseImageMemoryBind* pBinds;
-  };
-  static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct BindSparseInfo - { - protected: - BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, - const vk::Semaphore* pWaitSemaphores_ = nullptr, - uint32_t bufferBindCount_ = 0, - const vk::SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, - uint32_t imageOpaqueBindCount_ = 0, - const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, - uint32_t imageBindCount_ = 0, - const vk::SparseImageMemoryBindInfo* pImageBinds_ = nullptr, - uint32_t signalSemaphoreCount_ = 0, - const vk::Semaphore* pSignalSemaphores_ = nullptr ) - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , bufferBindCount( bufferBindCount_ ) - , pBufferBinds( pBufferBinds_ ) - , imageOpaqueBindCount( imageOpaqueBindCount_ ) - , pImageOpaqueBinds( pImageOpaqueBinds_ ) - , imageBindCount( imageBindCount_ ) - , pImageBinds( pImageBinds_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) - {} - - BindSparseInfo( VkBindSparseInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBindSparseInfo; - const void* pNext = nullptr; - uint32_t waitSemaphoreCount; - const vk::Semaphore* pWaitSemaphores; - uint32_t bufferBindCount; - const vk::SparseBufferMemoryBindInfo* pBufferBinds; - uint32_t imageOpaqueBindCount; - const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; - uint32_t imageBindCount; - const vk::SparseImageMemoryBindInfo* pImageBinds; - uint32_t signalSemaphoreCount; - const vk::Semaphore* pSignalSemaphores; - }; - static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "layout struct and wrapper have different size!" 
); - } - - struct BindSparseInfo : public layout::BindSparseInfo - { - BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, - const vk::Semaphore* pWaitSemaphores_ = nullptr, - uint32_t bufferBindCount_ = 0, - const vk::SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, - uint32_t imageOpaqueBindCount_ = 0, - const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, - uint32_t imageBindCount_ = 0, - const vk::SparseImageMemoryBindInfo* pImageBinds_ = nullptr, - uint32_t signalSemaphoreCount_ = 0, - const vk::Semaphore* pSignalSemaphores_ = nullptr ) - : layout::BindSparseInfo( waitSemaphoreCount_, pWaitSemaphores_, bufferBindCount_, pBufferBinds_, imageOpaqueBindCount_, pImageOpaqueBinds_, imageBindCount_, pImageBinds_, signalSemaphoreCount_, pSignalSemaphores_ ) - {} - - BindSparseInfo( VkBindSparseInfo const & rhs ) - : layout::BindSparseInfo( rhs ) - {} - - BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BindSparseInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - BindSparseInfo & setPWaitSemaphores( const vk::Semaphore* pWaitSemaphores_ ) - { - pWaitSemaphores = pWaitSemaphores_; - return *this; - } - - BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) - { - bufferBindCount = bufferBindCount_; - return *this; - } - - BindSparseInfo & setPBufferBinds( const vk::SparseBufferMemoryBindInfo* pBufferBinds_ ) - { - pBufferBinds = pBufferBinds_; - return *this; - } - - BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) - { - imageOpaqueBindCount = imageOpaqueBindCount_; - return *this; - } - - BindSparseInfo & setPImageOpaqueBinds( const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) - { - pImageOpaqueBinds = pImageOpaqueBinds_; - return *this; - } - - BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) - { - imageBindCount = imageBindCount_; - return *this; - } - - BindSparseInfo & setPImageBinds( const vk::SparseImageMemoryBindInfo* pImageBinds_ ) - { - pImageBinds = pImageBinds_; - return *this; - } - - BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) - { - signalSemaphoreCount = signalSemaphoreCount_; - return *this; - } - - BindSparseInfo & setPSignalSemaphores( const vk::Semaphore* pSignalSemaphores_ ) - { - pSignalSemaphores = pSignalSemaphores_; - return *this; - } - - operator VkBindSparseInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBindSparseInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BindSparseInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( bufferBindCount == rhs.bufferBindCount ) - && ( pBufferBinds == rhs.pBufferBinds ) - && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) - && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) - && ( imageBindCount == rhs.imageBindCount ) - && ( pImageBinds == rhs.pImageBinds ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); - } - - bool operator!=( BindSparseInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BindSparseInfo::sType; - }; - static_assert( sizeof( BindSparseInfo ) == sizeof( 
VkBindSparseInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct BufferCopy - { - BufferCopy( vk::DeviceSize srcOffset_ = 0, - vk::DeviceSize dstOffset_ = 0, - vk::DeviceSize size_ = 0 ) - : srcOffset( srcOffset_ ) - , dstOffset( dstOffset_ ) - , size( size_ ) - {} - - BufferCopy( VkBufferCopy const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferCopy& operator=( VkBufferCopy const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferCopy & setSrcOffset( vk::DeviceSize srcOffset_ ) - { - srcOffset = srcOffset_; - return *this; - } - - BufferCopy & setDstOffset( vk::DeviceSize dstOffset_ ) - { - dstOffset = dstOffset_; - return *this; - } - - BufferCopy & setSize( vk::DeviceSize size_ ) - { - size = size_; - return *this; - } - - operator VkBufferCopy const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferCopy &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferCopy const& rhs ) const - { - return ( srcOffset == rhs.srcOffset ) - && ( dstOffset == rhs.dstOffset ) - && ( size == rhs.size ); - } - - bool operator!=( BufferCopy const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DeviceSize srcOffset; - vk::DeviceSize dstOffset; - vk::DeviceSize size; - }; - static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BufferCreateInfo - { - protected: - BufferCreateInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(), - vk::DeviceSize size_ = 0, - vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(), - vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr ) - : flags( flags_ ) - , size( size_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - {} - - BufferCreateInfo( VkBufferCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBufferCreateInfo; - const void* pNext = nullptr; - vk::BufferCreateFlags flags; - vk::DeviceSize size; - vk::BufferUsageFlags usage; - vk::SharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - }; - static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct BufferCreateInfo : public layout::BufferCreateInfo - { - BufferCreateInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(), - vk::DeviceSize size_ = 0, - vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(), - vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr ) - : layout::BufferCreateInfo( flags_, size_, usage_, sharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_ ) - {} - - BufferCreateInfo( VkBufferCreateInfo const & rhs ) - : layout::BufferCreateInfo( rhs ) - {} - - BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BufferCreateInfo & setFlags( vk::BufferCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - BufferCreateInfo & setSize( vk::DeviceSize size_ ) - { - size = size_; - return *this; - } - - BufferCreateInfo & setUsage( vk::BufferUsageFlags usage_ ) - { - usage = usage_; - return *this; - } - - BufferCreateInfo & setSharingMode( vk::SharingMode sharingMode_ ) - { - sharingMode = sharingMode_; - return *this; - } - - BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - operator VkBufferCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( size == rhs.size ) - && ( usage == rhs.usage ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); - } - - bool operator!=( BufferCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BufferCreateInfo::sType; - }; - static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BufferDeviceAddressCreateInfoEXT - { - protected: - BufferDeviceAddressCreateInfoEXT( vk::DeviceAddress deviceAddress_ = 0 ) - : deviceAddress( deviceAddress_ ) - {} - - BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; - const void* pNext = nullptr; - vk::DeviceAddress deviceAddress; - }; - static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct BufferDeviceAddressCreateInfoEXT : public layout::BufferDeviceAddressCreateInfoEXT - { - BufferDeviceAddressCreateInfoEXT( vk::DeviceAddress deviceAddress_ = 0 ) - : layout::BufferDeviceAddressCreateInfoEXT( deviceAddress_ ) - {} - - BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) - : layout::BufferDeviceAddressCreateInfoEXT( rhs ) - {} - - BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BufferDeviceAddressCreateInfoEXT & setDeviceAddress( vk::DeviceAddress deviceAddress_ ) - { - deviceAddress = deviceAddress_; - return *this; - } - - operator VkBufferDeviceAddressCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferDeviceAddressCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceAddress == rhs.deviceAddress ); - } - - bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BufferDeviceAddressCreateInfoEXT::sType; - }; - static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BufferDeviceAddressInfoEXT - { - protected: - BufferDeviceAddressInfoEXT( vk::Buffer buffer_ = vk::Buffer() ) - : buffer( buffer_ ) - {} - - BufferDeviceAddressInfoEXT( VkBufferDeviceAddressInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferDeviceAddressInfoEXT& operator=( VkBufferDeviceAddressInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBufferDeviceAddressInfoEXT; - const void* pNext = nullptr; - vk::Buffer buffer; - }; - static_assert( sizeof( BufferDeviceAddressInfoEXT ) == sizeof( VkBufferDeviceAddressInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct BufferDeviceAddressInfoEXT : public layout::BufferDeviceAddressInfoEXT - { - BufferDeviceAddressInfoEXT( vk::Buffer buffer_ = vk::Buffer() ) - : layout::BufferDeviceAddressInfoEXT( buffer_ ) - {} - - BufferDeviceAddressInfoEXT( VkBufferDeviceAddressInfoEXT const & rhs ) - : layout::BufferDeviceAddressInfoEXT( rhs ) - {} - - BufferDeviceAddressInfoEXT& operator=( VkBufferDeviceAddressInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferDeviceAddressInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BufferDeviceAddressInfoEXT & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - operator VkBufferDeviceAddressInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferDeviceAddressInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferDeviceAddressInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( BufferDeviceAddressInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BufferDeviceAddressInfoEXT::sType; - }; - static_assert( sizeof( BufferDeviceAddressInfoEXT ) == sizeof( VkBufferDeviceAddressInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ImageSubresourceLayers - { - ImageSubresourceLayers( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(), - uint32_t mipLevel_ = 0, - uint32_t baseArrayLayer_ = 0, - uint32_t layerCount_ = 0 ) - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - {} - - ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageSubresourceLayers & setAspectMask( vk::ImageAspectFlags aspectMask_ ) - { - aspectMask = aspectMask_; - return *this; - } - - ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) - { - mipLevel = mipLevel_; - return *this; - } - - ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) - { - baseArrayLayer = baseArrayLayer_; - return *this; - } - - ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) - { - layerCount = layerCount_; - return *this; - } - - operator VkImageSubresourceLayers const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresourceLayers &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSubresourceLayers const& rhs ) const - { - return ( aspectMask == rhs.aspectMask ) - && ( mipLevel == rhs.mipLevel ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); - } - - bool operator!=( ImageSubresourceLayers const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t baseArrayLayer; - uint32_t layerCount; - }; - static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct BufferImageCopy - { - BufferImageCopy( vk::DeviceSize bufferOffset_ = 0, - uint32_t bufferRowLength_ = 0, - uint32_t bufferImageHeight_ = 0, - vk::ImageSubresourceLayers imageSubresource_ = vk::ImageSubresourceLayers(), - vk::Offset3D imageOffset_ = vk::Offset3D(), - vk::Extent3D imageExtent_ = vk::Extent3D() ) - : bufferOffset( bufferOffset_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) - {} - - BufferImageCopy( VkBufferImageCopy const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferImageCopy & setBufferOffset( vk::DeviceSize bufferOffset_ ) - { - bufferOffset = bufferOffset_; - return *this; - } - - BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) - { - bufferRowLength = bufferRowLength_; - return *this; - } - - BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) - { - bufferImageHeight = bufferImageHeight_; - return *this; - } - - BufferImageCopy & setImageSubresource( vk::ImageSubresourceLayers imageSubresource_ ) - { - imageSubresource = imageSubresource_; - return *this; - } - - BufferImageCopy & setImageOffset( vk::Offset3D imageOffset_ ) - { - imageOffset = imageOffset_; - return *this; - } - - BufferImageCopy & setImageExtent( vk::Extent3D imageExtent_ ) - { - imageExtent = imageExtent_; - return *this; - } - - operator VkBufferImageCopy const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferImageCopy &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferImageCopy const& rhs ) const - { - return ( bufferOffset == rhs.bufferOffset ) - && ( bufferRowLength == rhs.bufferRowLength ) - && ( bufferImageHeight == rhs.bufferImageHeight ) - && ( imageSubresource == rhs.imageSubresource ) - && ( imageOffset == rhs.imageOffset ) - && ( imageExtent == rhs.imageExtent ); - } - - bool operator!=( BufferImageCopy const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DeviceSize bufferOffset; - uint32_t bufferRowLength; - uint32_t bufferImageHeight; - vk::ImageSubresourceLayers imageSubresource; - vk::Offset3D imageOffset; - vk::Extent3D imageExtent; - }; - static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct BufferMemoryBarrier - { - protected: - BufferMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - uint32_t srcQueueFamilyIndex_ = 0, - uint32_t dstQueueFamilyIndex_ = 0, - vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceSize offset_ = 0, - vk::DeviceSize size_ = 0 ) - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) - {} - - BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBufferMemoryBarrier; - const void* pNext = nullptr; - vk::AccessFlags srcAccessMask; - vk::AccessFlags dstAccessMask; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - vk::Buffer buffer; - vk::DeviceSize offset; - vk::DeviceSize size; - }; - static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "layout struct and wrapper have different size!" ); - } - - struct BufferMemoryBarrier : public layout::BufferMemoryBarrier - { - BufferMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - uint32_t srcQueueFamilyIndex_ = 0, - uint32_t dstQueueFamilyIndex_ = 0, - vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceSize offset_ = 0, - vk::DeviceSize size_ = 0 ) - : layout::BufferMemoryBarrier( srcAccessMask_, dstAccessMask_, srcQueueFamilyIndex_, dstQueueFamilyIndex_, buffer_, offset_, size_ ) - {} - - BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) - : layout::BufferMemoryBarrier( rhs ) - {} - - BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferMemoryBarrier & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BufferMemoryBarrier & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) - { - srcAccessMask = srcAccessMask_; - return *this; - } - - BufferMemoryBarrier & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) - { - dstAccessMask = dstAccessMask_; - return *this; - } - - BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) - { - srcQueueFamilyIndex = srcQueueFamilyIndex_; - return *this; - } - - BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) - { - dstQueueFamilyIndex = dstQueueFamilyIndex_; - return *this; - } - - BufferMemoryBarrier & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - BufferMemoryBarrier & setOffset( vk::DeviceSize offset_ ) - { - offset = offset_; - return *this; - } - - BufferMemoryBarrier & setSize( vk::DeviceSize size_ ) - { - size = size_; - return *this; - } - - operator VkBufferMemoryBarrier const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferMemoryBarrier &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferMemoryBarrier const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( buffer == rhs.buffer ) - && ( 
offset == rhs.offset ) - && ( size == rhs.size ); - } - - bool operator!=( BufferMemoryBarrier const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BufferMemoryBarrier::sType; - }; - static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct BufferMemoryRequirementsInfo2 - { - protected: - BufferMemoryRequirementsInfo2( vk::Buffer buffer_ = vk::Buffer() ) - : buffer( buffer_ ) - {} - - BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; - const void* pNext = nullptr; - vk::Buffer buffer; - }; - static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "layout struct and wrapper have different size!" ); - } - - struct BufferMemoryRequirementsInfo2 : public layout::BufferMemoryRequirementsInfo2 - { - BufferMemoryRequirementsInfo2( vk::Buffer buffer_ = vk::Buffer() ) - : layout::BufferMemoryRequirementsInfo2( buffer_ ) - {} - - BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) - : layout::BufferMemoryRequirementsInfo2( rhs ) - {} - - BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BufferMemoryRequirementsInfo2 & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - operator VkBufferMemoryRequirementsInfo2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferMemoryRequirementsInfo2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BufferMemoryRequirementsInfo2::sType; - }; - static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct BufferViewCreateInfo - { - protected: - BufferViewCreateInfo( vk::BufferViewCreateFlags flags_ = vk::BufferViewCreateFlags(), - vk::Buffer buffer_ = vk::Buffer(), - vk::Format format_ = vk::Format::eUndefined, - vk::DeviceSize offset_ = 0, - vk::DeviceSize range_ = 0 ) - : flags( flags_ ) - , buffer( buffer_ ) - , format( format_ ) - , offset( offset_ ) - , range( range_ ) - {} - - BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eBufferViewCreateInfo; - const void* pNext = nullptr; - vk::BufferViewCreateFlags flags; - vk::Buffer buffer; - vk::Format format; - vk::DeviceSize offset; - vk::DeviceSize range; - }; - static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct BufferViewCreateInfo : public layout::BufferViewCreateInfo - { - BufferViewCreateInfo( vk::BufferViewCreateFlags flags_ = vk::BufferViewCreateFlags(), - vk::Buffer buffer_ = vk::Buffer(), - vk::Format format_ = vk::Format::eUndefined, - vk::DeviceSize offset_ = 0, - vk::DeviceSize range_ = 0 ) - : layout::BufferViewCreateInfo( flags_, buffer_, format_, offset_, range_ ) - {} - - BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) - : layout::BufferViewCreateInfo( rhs ) - {} - - BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - BufferViewCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - BufferViewCreateInfo & setFlags( vk::BufferViewCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - BufferViewCreateInfo & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - BufferViewCreateInfo & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - BufferViewCreateInfo & setOffset( vk::DeviceSize offset_ ) - { - offset = offset_; - return *this; - } - - BufferViewCreateInfo & setRange( vk::DeviceSize range_ ) - { - range = range_; - return *this; - } - - operator VkBufferViewCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkBufferViewCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( BufferViewCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ) - && ( format == rhs.format ) - && ( offset == rhs.offset ) - && ( range == rhs.range ); - } - - bool operator!=( BufferViewCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::BufferViewCreateInfo::sType; - }; - static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct CalibratedTimestampInfoEXT - { - protected: - CalibratedTimestampInfoEXT( vk::TimeDomainEXT timeDomain_ = vk::TimeDomainEXT::eDevice ) - : timeDomain( timeDomain_ ) - {} - - CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; - const void* pNext = nullptr; - vk::TimeDomainEXT timeDomain; - }; - static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct CalibratedTimestampInfoEXT : public layout::CalibratedTimestampInfoEXT - { - CalibratedTimestampInfoEXT( vk::TimeDomainEXT timeDomain_ = vk::TimeDomainEXT::eDevice ) - : layout::CalibratedTimestampInfoEXT( timeDomain_ ) - {} - - CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) - : layout::CalibratedTimestampInfoEXT( rhs ) - {} - - CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CalibratedTimestampInfoEXT & setTimeDomain( vk::TimeDomainEXT timeDomain_ ) - { - timeDomain = timeDomain_; - return *this; - } - - operator VkCalibratedTimestampInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCalibratedTimestampInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CalibratedTimestampInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( timeDomain == rhs.timeDomain ); - } - - bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CalibratedTimestampInfoEXT::sType; - }; - static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct CheckpointDataNV - { - protected: - CheckpointDataNV( vk::PipelineStageFlagBits stage_ = vk::PipelineStageFlagBits::eTopOfPipe, - void* pCheckpointMarker_ = nullptr ) - : stage( stage_ ) - , pCheckpointMarker( pCheckpointMarker_ ) - {} - - CheckpointDataNV( VkCheckpointDataNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCheckpointDataNV; - void* pNext = nullptr; - vk::PipelineStageFlagBits stage; - void* pCheckpointMarker; - }; - static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "layout struct and wrapper have different size!" 
); - } - - struct CheckpointDataNV : public layout::CheckpointDataNV - { - operator VkCheckpointDataNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCheckpointDataNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CheckpointDataNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stage == rhs.stage ) - && ( pCheckpointMarker == rhs.pCheckpointMarker ); - } - - bool operator!=( CheckpointDataNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CheckpointDataNV::sType; - }; - static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - union ClearColorValue - { - ClearColorValue( const std::array& float32_ = { { 0 } } ) - { - memcpy( float32, float32_.data(), 4 * sizeof( float ) ); - } - - ClearColorValue( const std::array& int32_ ) - { - memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) ); - } - - ClearColorValue( const std::array& uint32_ ) - { - memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) ); - } - - ClearColorValue & setFloat32( std::array float32_ ) - { - memcpy( float32, float32_.data(), 4 * sizeof( float ) ); - return *this; - } - - ClearColorValue & setInt32( std::array int32_ ) - { - memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) ); - return *this; - } - - ClearColorValue & setUint32( std::array uint32_ ) - { - memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) ); - return *this; - } - operator VkClearColorValue const&() const - { - return *reinterpret_cast(this); - } - - operator VkClearColorValue &() - { - return *reinterpret_cast(this); - } - - float float32[4]; - int32_t int32[4]; - uint32_t uint32[4]; - }; - - struct ClearDepthStencilValue - { - ClearDepthStencilValue( float depth_ = 0, - uint32_t stencil_ = 0 ) - : depth( depth_ ) - , stencil( stencil_ ) - {} - - ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ClearDepthStencilValue & setDepth( float depth_ ) - { - depth = depth_; - return *this; - } - - ClearDepthStencilValue & setStencil( uint32_t stencil_ ) - { - stencil = stencil_; - return *this; - } - - operator VkClearDepthStencilValue const&() const - { - return *reinterpret_cast( this ); - } - - operator VkClearDepthStencilValue &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ClearDepthStencilValue const& rhs ) const - { - return ( depth == rhs.depth ) - && ( stencil == rhs.stencil ); - } - - bool operator!=( ClearDepthStencilValue const& rhs ) const - { - return !operator==( rhs ); - } - - public: - float depth; - uint32_t stencil; - }; - static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - union ClearValue - { - ClearValue( vk::ClearColorValue color_ = vk::ClearColorValue() ) - { - color = color_; - } - - ClearValue( vk::ClearDepthStencilValue depthStencil_ ) - { - depthStencil = depthStencil_; - } - - ClearValue & setColor( vk::ClearColorValue color_ ) - { - color = color_; - return *this; - } - - ClearValue & setDepthStencil( vk::ClearDepthStencilValue depthStencil_ ) - { - depthStencil = depthStencil_; - return *this; - } - operator VkClearValue const&() const - { - return *reinterpret_cast(this); - } - - operator VkClearValue &() - { - return *reinterpret_cast(this); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - vk::ClearColorValue color; - vk::ClearDepthStencilValue depthStencil; -#else - VkClearColorValue color; - VkClearDepthStencilValue depthStencil; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct ClearAttachment - { - ClearAttachment( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(), - uint32_t colorAttachment_ = 0, - vk::ClearValue clearValue_ = vk::ClearValue() ) - : aspectMask( aspectMask_ ) - , colorAttachment( colorAttachment_ ) - , clearValue( clearValue_ ) - {} - - ClearAttachment( VkClearAttachment const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ClearAttachment& operator=( VkClearAttachment const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ClearAttachment & setAspectMask( vk::ImageAspectFlags aspectMask_ ) - { - aspectMask = aspectMask_; - return *this; - } - - ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) - { - colorAttachment = colorAttachment_; - return *this; - } - - ClearAttachment & setClearValue( vk::ClearValue clearValue_ ) - { - clearValue = clearValue_; - return *this; - } - - operator VkClearAttachment const&() const - { - return *reinterpret_cast( this ); - } - - operator VkClearAttachment &() - { - return *reinterpret_cast( this ); - } - - public: - vk::ImageAspectFlags aspectMask; - uint32_t colorAttachment; - vk::ClearValue clearValue; - }; - static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ClearRect - { - ClearRect( vk::Rect2D rect_ = vk::Rect2D(), - uint32_t baseArrayLayer_ = 0, - uint32_t layerCount_ = 0 ) - : rect( rect_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - {} - - ClearRect( VkClearRect const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ClearRect& operator=( VkClearRect const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ClearRect & setRect( vk::Rect2D rect_ ) - { - rect = rect_; - return *this; - } - - ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) - { - baseArrayLayer = baseArrayLayer_; - return *this; - } - - ClearRect & setLayerCount( uint32_t layerCount_ ) - { - layerCount = layerCount_; - return *this; - } - - operator VkClearRect const&() const - { - return *reinterpret_cast( this ); - } - - operator VkClearRect &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ClearRect const& rhs ) const - { - return ( rect == rhs.rect ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); - } - - bool operator!=( ClearRect const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Rect2D rect; - uint32_t baseArrayLayer; - uint32_t layerCount; - }; - static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct IndirectCommandsTokenNVX - { - IndirectCommandsTokenNVX( vk::IndirectCommandsTokenTypeNVX tokenType_ = vk::IndirectCommandsTokenTypeNVX::ePipeline, - vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceSize offset_ = 0 ) - : tokenType( tokenType_ ) - , buffer( buffer_ ) - , offset( offset_ ) - {} - - IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - IndirectCommandsTokenNVX & setTokenType( vk::IndirectCommandsTokenTypeNVX tokenType_ ) - { - tokenType = tokenType_; - return *this; - } - - IndirectCommandsTokenNVX & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - IndirectCommandsTokenNVX & setOffset( vk::DeviceSize offset_ ) - { - offset = offset_; - return *this; - } - - operator VkIndirectCommandsTokenNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsTokenNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( IndirectCommandsTokenNVX const& rhs ) const - { - return ( tokenType == rhs.tokenType ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ); - } - - bool operator!=( IndirectCommandsTokenNVX const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::IndirectCommandsTokenTypeNVX tokenType; - vk::Buffer buffer; - vk::DeviceSize offset; - }; - static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct CmdProcessCommandsInfoNVX - { - protected: - CmdProcessCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(), - vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(), - uint32_t indirectCommandsTokenCount_ = 0, - const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, - uint32_t maxSequencesCount_ = 0, - vk::CommandBuffer targetCommandBuffer_ = vk::CommandBuffer(), - vk::Buffer sequencesCountBuffer_ = vk::Buffer(), - vk::DeviceSize sequencesCountOffset_ = 0, - vk::Buffer sequencesIndexBuffer_ = vk::Buffer(), - vk::DeviceSize sequencesIndexOffset_ = 0 ) - : objectTable( objectTable_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , indirectCommandsTokenCount( indirectCommandsTokenCount_ ) - , pIndirectCommandsTokens( pIndirectCommandsTokens_ ) - , maxSequencesCount( maxSequencesCount_ ) - , targetCommandBuffer( targetCommandBuffer_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) - {} - - CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX; - const void* pNext = nullptr; - vk::ObjectTableNVX objectTable; - vk::IndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens; - uint32_t maxSequencesCount; - vk::CommandBuffer targetCommandBuffer; - vk::Buffer sequencesCountBuffer; - vk::DeviceSize sequencesCountOffset; - vk::Buffer sequencesIndexBuffer; - vk::DeviceSize sequencesIndexOffset; - }; - static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "layout struct and wrapper have different size!" 
); - } - - struct CmdProcessCommandsInfoNVX : public layout::CmdProcessCommandsInfoNVX - { - CmdProcessCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(), - vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(), - uint32_t indirectCommandsTokenCount_ = 0, - const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, - uint32_t maxSequencesCount_ = 0, - vk::CommandBuffer targetCommandBuffer_ = vk::CommandBuffer(), - vk::Buffer sequencesCountBuffer_ = vk::Buffer(), - vk::DeviceSize sequencesCountOffset_ = 0, - vk::Buffer sequencesIndexBuffer_ = vk::Buffer(), - vk::DeviceSize sequencesIndexOffset_ = 0 ) - : layout::CmdProcessCommandsInfoNVX( objectTable_, indirectCommandsLayout_, indirectCommandsTokenCount_, pIndirectCommandsTokens_, maxSequencesCount_, targetCommandBuffer_, sequencesCountBuffer_, sequencesCountOffset_, sequencesIndexBuffer_, sequencesIndexOffset_ ) - {} - - CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) - : layout::CmdProcessCommandsInfoNVX( rhs ) - {} - - CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CmdProcessCommandsInfoNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CmdProcessCommandsInfoNVX & setObjectTable( vk::ObjectTableNVX objectTable_ ) - { - objectTable = objectTable_; - return *this; - } - - CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - CmdProcessCommandsInfoNVX & setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) - { - indirectCommandsTokenCount = indirectCommandsTokenCount_; - return *this; - } - - CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) - { - pIndirectCommandsTokens = pIndirectCommandsTokens_; - return *this; - } - - CmdProcessCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) - { - maxSequencesCount = maxSequencesCount_; - return *this; - } - - CmdProcessCommandsInfoNVX & setTargetCommandBuffer( vk::CommandBuffer targetCommandBuffer_ ) - { - targetCommandBuffer = targetCommandBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesCountBuffer( vk::Buffer sequencesCountBuffer_ ) - { - sequencesCountBuffer = sequencesCountBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesCountOffset( vk::DeviceSize sequencesCountOffset_ ) - { - sequencesCountOffset = sequencesCountOffset_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( vk::Buffer sequencesIndexBuffer_ ) - { - sequencesIndexBuffer = sequencesIndexBuffer_; - return *this; - } - - CmdProcessCommandsInfoNVX & setSequencesIndexOffset( vk::DeviceSize sequencesIndexOffset_ ) - { - sequencesIndexOffset = sequencesIndexOffset_; - return *this; - } - - operator VkCmdProcessCommandsInfoNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCmdProcessCommandsInfoNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectTable == rhs.objectTable ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount ) - && ( pIndirectCommandsTokens == 
               rhs.pIndirectCommandsTokens )
-          && ( maxSequencesCount == rhs.maxSequencesCount )
-          && ( targetCommandBuffer == rhs.targetCommandBuffer )
-          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
-          && ( sequencesCountOffset == rhs.sequencesCountOffset )
-          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
-          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
-    }
-
-    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::CmdProcessCommandsInfoNVX::sType;
-  };
-  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CmdProcessCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct CmdReserveSpaceForCommandsInfoNVX
-    {
-    protected:
-      CmdReserveSpaceForCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(),
-                                         vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(),
-                                         uint32_t maxSequencesCount_ = 0 )
-        : objectTable( objectTable_ )
-        , indirectCommandsLayout( indirectCommandsLayout_ )
-        , maxSequencesCount( maxSequencesCount_ )
-      {}
-
-      CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
-      {
-        *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>(this) = rhs;
-      }
-
-      CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
-      {
-        *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
-      const void* pNext = nullptr;
-      vk::ObjectTableNVX objectTable;
-      vk::IndirectCommandsLayoutNVX indirectCommandsLayout;
-      uint32_t maxSequencesCount;
-    };
-    static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "layout struct and wrapper have different size!"
); - } - - struct CmdReserveSpaceForCommandsInfoNVX : public layout::CmdReserveSpaceForCommandsInfoNVX - { - CmdReserveSpaceForCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(), - vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(), - uint32_t maxSequencesCount_ = 0 ) - : layout::CmdReserveSpaceForCommandsInfoNVX( objectTable_, indirectCommandsLayout_, maxSequencesCount_ ) - {} - - CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) - : layout::CmdReserveSpaceForCommandsInfoNVX( rhs ) - {} - - CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setObjectTable( vk::ObjectTableNVX objectTable_ ) - { - objectTable = objectTable_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - CmdReserveSpaceForCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) - { - maxSequencesCount = maxSequencesCount_; - return *this; - } - - operator VkCmdReserveSpaceForCommandsInfoNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCmdReserveSpaceForCommandsInfoNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectTable == rhs.objectTable ) - && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) - && ( maxSequencesCount == rhs.maxSequencesCount ); - } - - bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CmdReserveSpaceForCommandsInfoNVX::sType; - }; - static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct CoarseSampleLocationNV - { - CoarseSampleLocationNV( uint32_t pixelX_ = 0, - uint32_t pixelY_ = 0, - uint32_t sample_ = 0 ) - : pixelX( pixelX_ ) - , pixelY( pixelY_ ) - , sample( sample_ ) - {} - - CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) - { - pixelX = pixelX_; - return *this; - } - - CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) - { - pixelY = pixelY_; - return *this; - } - - CoarseSampleLocationNV & setSample( uint32_t sample_ ) - { - sample = sample_; - return *this; - } - - operator VkCoarseSampleLocationNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCoarseSampleLocationNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CoarseSampleLocationNV const& rhs ) const - { - return ( pixelX == rhs.pixelX ) - && ( pixelY == rhs.pixelY ) - && ( sample == rhs.sample ); - } - - bool operator!=( CoarseSampleLocationNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t pixelX; - uint32_t pixelY; - uint32_t sample; - }; - static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct CoarseSampleOrderCustomNV - { - CoarseSampleOrderCustomNV( vk::ShadingRatePaletteEntryNV shadingRate_ = vk::ShadingRatePaletteEntryNV::eNoInvocations, - uint32_t sampleCount_ = 0, - uint32_t sampleLocationCount_ = 0, - const vk::CoarseSampleLocationNV* pSampleLocations_ = nullptr ) - : shadingRate( shadingRate_ ) - , sampleCount( sampleCount_ ) - , sampleLocationCount( sampleLocationCount_ ) - , pSampleLocations( pSampleLocations_ ) - {} - - CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CoarseSampleOrderCustomNV & setShadingRate( vk::ShadingRatePaletteEntryNV shadingRate_ ) - { - shadingRate = shadingRate_; - return *this; - } - - CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) - { - sampleCount = sampleCount_; - return *this; - } - - CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) - { - sampleLocationCount = sampleLocationCount_; - return *this; - } - - CoarseSampleOrderCustomNV & setPSampleLocations( const vk::CoarseSampleLocationNV* pSampleLocations_ ) - { - pSampleLocations = pSampleLocations_; - return *this; - } - - operator VkCoarseSampleOrderCustomNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCoarseSampleOrderCustomNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CoarseSampleOrderCustomNV const& rhs ) const - { - return ( shadingRate == rhs.shadingRate ) - && ( sampleCount == rhs.sampleCount ) - && ( sampleLocationCount == rhs.sampleLocationCount ) - && ( pSampleLocations == rhs.pSampleLocations ); - } - - bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ShadingRatePaletteEntryNV shadingRate; - uint32_t sampleCount; - uint32_t sampleLocationCount; - const vk::CoarseSampleLocationNV* pSampleLocations; - }; - 
static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct CommandBufferAllocateInfo - { - protected: - CommandBufferAllocateInfo( vk::CommandPool commandPool_ = vk::CommandPool(), - vk::CommandBufferLevel level_ = vk::CommandBufferLevel::ePrimary, - uint32_t commandBufferCount_ = 0 ) - : commandPool( commandPool_ ) - , level( level_ ) - , commandBufferCount( commandBufferCount_ ) - {} - - CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCommandBufferAllocateInfo; - const void* pNext = nullptr; - vk::CommandPool commandPool; - vk::CommandBufferLevel level; - uint32_t commandBufferCount; - }; - static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "layout struct and wrapper have different size!" ); - } - - struct CommandBufferAllocateInfo : public layout::CommandBufferAllocateInfo - { - CommandBufferAllocateInfo( vk::CommandPool commandPool_ = vk::CommandPool(), - vk::CommandBufferLevel level_ = vk::CommandBufferLevel::ePrimary, - uint32_t commandBufferCount_ = 0 ) - : layout::CommandBufferAllocateInfo( commandPool_, level_, commandBufferCount_ ) - {} - - CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) - : layout::CommandBufferAllocateInfo( rhs ) - {} - - CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CommandBufferAllocateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CommandBufferAllocateInfo & setCommandPool( vk::CommandPool commandPool_ ) - { - commandPool = commandPool_; - return *this; - } - - CommandBufferAllocateInfo & setLevel( vk::CommandBufferLevel level_ ) - { - level = level_; - return *this; - } - - CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) - { - commandBufferCount = commandBufferCount_; - return *this; - } - - operator VkCommandBufferAllocateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCommandBufferAllocateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CommandBufferAllocateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( commandPool == rhs.commandPool ) - && ( level == rhs.level ) - && ( commandBufferCount == rhs.commandBufferCount ); - } - - bool operator!=( CommandBufferAllocateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CommandBufferAllocateInfo::sType; - }; - static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct CommandBufferInheritanceInfo - { - protected: - CommandBufferInheritanceInfo( vk::RenderPass renderPass_ = vk::RenderPass(), - uint32_t subpass_ = 0, - vk::Framebuffer framebuffer_ = vk::Framebuffer(), - vk::Bool32 occlusionQueryEnable_ = 0, - vk::QueryControlFlags queryFlags_ = vk::QueryControlFlags(), - vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) - : renderPass( renderPass_ ) - , subpass( subpass_ ) - , framebuffer( framebuffer_ ) - , occlusionQueryEnable( occlusionQueryEnable_ ) - , queryFlags( queryFlags_ ) - , pipelineStatistics( pipelineStatistics_ ) - {} - - CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCommandBufferInheritanceInfo; - const void* pNext = nullptr; - vk::RenderPass renderPass; - uint32_t subpass; - vk::Framebuffer framebuffer; - vk::Bool32 occlusionQueryEnable; - vk::QueryControlFlags queryFlags; - vk::QueryPipelineStatisticFlags pipelineStatistics; - }; - static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "layout struct and wrapper have different size!" ); - } - - struct CommandBufferInheritanceInfo : public layout::CommandBufferInheritanceInfo - { - CommandBufferInheritanceInfo( vk::RenderPass renderPass_ = vk::RenderPass(), - uint32_t subpass_ = 0, - vk::Framebuffer framebuffer_ = vk::Framebuffer(), - vk::Bool32 occlusionQueryEnable_ = 0, - vk::QueryControlFlags queryFlags_ = vk::QueryControlFlags(), - vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) - : layout::CommandBufferInheritanceInfo( renderPass_, subpass_, framebuffer_, occlusionQueryEnable_, queryFlags_, pipelineStatistics_ ) - {} - - CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) - : layout::CommandBufferInheritanceInfo( rhs ) - {} - - CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CommandBufferInheritanceInfo & setRenderPass( vk::RenderPass renderPass_ ) - { - renderPass = renderPass_; - return *this; - } - - CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) - { - subpass = subpass_; - return *this; - } - - CommandBufferInheritanceInfo & setFramebuffer( vk::Framebuffer framebuffer_ ) - { - framebuffer = framebuffer_; - return *this; - } - - CommandBufferInheritanceInfo & setOcclusionQueryEnable( vk::Bool32 occlusionQueryEnable_ ) - { - occlusionQueryEnable = occlusionQueryEnable_; - return *this; - } - - CommandBufferInheritanceInfo & setQueryFlags( vk::QueryControlFlags queryFlags_ ) - { - queryFlags = queryFlags_; - return *this; - } - - CommandBufferInheritanceInfo & setPipelineStatistics( vk::QueryPipelineStatisticFlags pipelineStatistics_ ) - { - pipelineStatistics = pipelineStatistics_; - return *this; - } - - operator VkCommandBufferInheritanceInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCommandBufferInheritanceInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CommandBufferInheritanceInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == 
rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( subpass == rhs.subpass ) - && ( framebuffer == rhs.framebuffer ) - && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) - && ( queryFlags == rhs.queryFlags ) - && ( pipelineStatistics == rhs.pipelineStatistics ); - } - - bool operator!=( CommandBufferInheritanceInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CommandBufferInheritanceInfo::sType; - }; - static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct CommandBufferBeginInfo - { - protected: - CommandBufferBeginInfo( vk::CommandBufferUsageFlags flags_ = vk::CommandBufferUsageFlags(), - const vk::CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) - : flags( flags_ ) - , pInheritanceInfo( pInheritanceInfo_ ) - {} - - CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCommandBufferBeginInfo; - const void* pNext = nullptr; - vk::CommandBufferUsageFlags flags; - const vk::CommandBufferInheritanceInfo* pInheritanceInfo; - }; - static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "layout struct and wrapper have different size!" ); - } - - struct CommandBufferBeginInfo : public layout::CommandBufferBeginInfo - { - CommandBufferBeginInfo( vk::CommandBufferUsageFlags flags_ = vk::CommandBufferUsageFlags(), - const vk::CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) - : layout::CommandBufferBeginInfo( flags_, pInheritanceInfo_ ) - {} - - CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) - : layout::CommandBufferBeginInfo( rhs ) - {} - - CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CommandBufferBeginInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CommandBufferBeginInfo & setFlags( vk::CommandBufferUsageFlags flags_ ) - { - flags = flags_; - return *this; - } - - CommandBufferBeginInfo & setPInheritanceInfo( const vk::CommandBufferInheritanceInfo* pInheritanceInfo_ ) - { - pInheritanceInfo = pInheritanceInfo_; - return *this; - } - - operator VkCommandBufferBeginInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCommandBufferBeginInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CommandBufferBeginInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pInheritanceInfo == rhs.pInheritanceInfo ); - } - - bool operator!=( CommandBufferBeginInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CommandBufferBeginInfo::sType; - }; - static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
-  );
-
-  namespace layout
-  {
-    struct CommandBufferInheritanceConditionalRenderingInfoEXT
-    {
-    protected:
-      CommandBufferInheritanceConditionalRenderingInfoEXT( vk::Bool32 conditionalRenderingEnable_ = 0 )
-        : conditionalRenderingEnable( conditionalRenderingEnable_ )
-      {}
-
-      CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs )
-      {
-        *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this) = rhs;
-      }
-
-      CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs )
-      {
-        *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
-      const void* pNext = nullptr;
-      vk::Bool32 conditionalRenderingEnable;
-    };
-    static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "layout struct and wrapper have different size!" );
-  }
-
-  struct CommandBufferInheritanceConditionalRenderingInfoEXT : public layout::CommandBufferInheritanceConditionalRenderingInfoEXT
-  {
-    CommandBufferInheritanceConditionalRenderingInfoEXT( vk::Bool32 conditionalRenderingEnable_ = 0 )
-      : layout::CommandBufferInheritanceConditionalRenderingInfoEXT( conditionalRenderingEnable_ )
-    {}
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs )
-      : layout::CommandBufferInheritanceConditionalRenderingInfoEXT( rhs )
-    {}
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs )
-    {
-      *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this) = rhs;
-      return *this;
-    }
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( vk::Bool32 conditionalRenderingEnable_ )
-    {
-      conditionalRenderingEnable = conditionalRenderingEnable_;
-      return *this;
-    }
-
-    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
-    }
-
-    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &()
-    {
-      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
-    }
-
-    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
-    }
-
-    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::CommandBufferInheritanceConditionalRenderingInfoEXT::sType;
-  };
-  static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct CommandPoolCreateInfo
-    {
-    protected:
-      CommandPoolCreateInfo( vk::CommandPoolCreateFlags flags_ = vk::CommandPoolCreateFlags(),
-                             uint32_t queueFamilyIndex_ = 0 )
-        : flags( flags_ )
-        , queueFamilyIndex( queueFamilyIndex_ )
-      {}
-
-      CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
-      {
-        *reinterpret_cast<VkCommandPoolCreateInfo*>(this) = rhs;
-      }
-
-      CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
-      {
-        *reinterpret_cast<VkCommandPoolCreateInfo*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eCommandPoolCreateInfo;
-      const void* pNext = nullptr;
-      vk::CommandPoolCreateFlags flags;
-      uint32_t queueFamilyIndex;
-    };
-    static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "layout struct and wrapper have different size!" );
-  }
-
-  struct CommandPoolCreateInfo : public layout::CommandPoolCreateInfo
-  {
-    CommandPoolCreateInfo( vk::CommandPoolCreateFlags flags_ = vk::CommandPoolCreateFlags(),
-                           uint32_t queueFamilyIndex_ = 0 )
-      : layout::CommandPoolCreateInfo( flags_, queueFamilyIndex_ )
-    {}
-
-    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
-      : layout::CommandPoolCreateInfo( rhs )
-    {}
-
-    CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
-    {
-      *reinterpret_cast<VkCommandPoolCreateInfo*>(this) = rhs;
-      return *this;
-    }
-
-    CommandPoolCreateInfo & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    CommandPoolCreateInfo & setFlags( vk::CommandPoolCreateFlags flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
-    {
-      queueFamilyIndex = queueFamilyIndex_;
-      return *this;
-    }
-
-    operator VkCommandPoolCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
-    }
-
-    operator VkCommandPoolCreateInfo &()
-    {
-      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
-    }
-
-    bool operator==( CommandPoolCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( queueFamilyIndex == rhs.queueFamilyIndex );
-    }
-
-    bool operator!=( CommandPoolCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::CommandPoolCreateInfo::sType;
-  };
-  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!"
); - - struct SpecializationMapEntry - { - SpecializationMapEntry( uint32_t constantID_ = 0, - uint32_t offset_ = 0, - size_t size_ = 0 ) - : constantID( constantID_ ) - , offset( offset_ ) - , size( size_ ) - {} - - SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SpecializationMapEntry & setConstantID( uint32_t constantID_ ) - { - constantID = constantID_; - return *this; - } - - SpecializationMapEntry & setOffset( uint32_t offset_ ) - { - offset = offset_; - return *this; - } - - SpecializationMapEntry & setSize( size_t size_ ) - { - size = size_; - return *this; - } - - operator VkSpecializationMapEntry const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSpecializationMapEntry &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SpecializationMapEntry const& rhs ) const - { - return ( constantID == rhs.constantID ) - && ( offset == rhs.offset ) - && ( size == rhs.size ); - } - - bool operator!=( SpecializationMapEntry const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t constantID; - uint32_t offset; - size_t size; - }; - static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SpecializationInfo - { - SpecializationInfo( uint32_t mapEntryCount_ = 0, - const vk::SpecializationMapEntry* pMapEntries_ = nullptr, - size_t dataSize_ = 0, - const void* pData_ = nullptr ) - : mapEntryCount( mapEntryCount_ ) - , pMapEntries( pMapEntries_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) - {} - - SpecializationInfo( VkSpecializationInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) - { - mapEntryCount = mapEntryCount_; - return *this; - } - - SpecializationInfo & setPMapEntries( const vk::SpecializationMapEntry* pMapEntries_ ) - { - pMapEntries = pMapEntries_; - return *this; - } - - SpecializationInfo & setDataSize( size_t dataSize_ ) - { - dataSize = dataSize_; - return *this; - } - - SpecializationInfo & setPData( const void* pData_ ) - { - pData = pData_; - return *this; - } - - operator VkSpecializationInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSpecializationInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SpecializationInfo const& rhs ) const - { - return ( mapEntryCount == rhs.mapEntryCount ) - && ( pMapEntries == rhs.pMapEntries ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); - } - - bool operator!=( SpecializationInfo const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t mapEntryCount; - const vk::SpecializationMapEntry* pMapEntries; - size_t dataSize; - const void* pData; - }; - static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PipelineShaderStageCreateInfo - { - protected: - PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags flags_ = vk::PipelineShaderStageCreateFlags(), - vk::ShaderStageFlagBits stage_ = vk::ShaderStageFlagBits::eVertex, - vk::ShaderModule module_ = vk::ShaderModule(), - const char* pName_ = nullptr, - const vk::SpecializationInfo* pSpecializationInfo_ = nullptr ) - : flags( flags_ ) - , stage( stage_ ) - , module( module_ ) - , pName( pName_ ) - , pSpecializationInfo( pSpecializationInfo_ ) - {} - - PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; - const void* pNext = nullptr; - vk::PipelineShaderStageCreateFlags flags; - vk::ShaderStageFlagBits stage; - vk::ShaderModule module; - const char* pName; - const vk::SpecializationInfo* pSpecializationInfo; - }; - static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct PipelineShaderStageCreateInfo : public layout::PipelineShaderStageCreateInfo - { - PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags flags_ = vk::PipelineShaderStageCreateFlags(), - vk::ShaderStageFlagBits stage_ = vk::ShaderStageFlagBits::eVertex, - vk::ShaderModule module_ = vk::ShaderModule(), - const char* pName_ = nullptr, - const vk::SpecializationInfo* pSpecializationInfo_ = nullptr ) - : layout::PipelineShaderStageCreateInfo( flags_, stage_, module_, pName_, pSpecializationInfo_ ) - {} - - PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) - : layout::PipelineShaderStageCreateInfo( rhs ) - {} - - PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineShaderStageCreateInfo & setFlags( vk::PipelineShaderStageCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - PipelineShaderStageCreateInfo & setStage( vk::ShaderStageFlagBits stage_ ) - { - stage = stage_; - return *this; - } - - PipelineShaderStageCreateInfo & setModule( vk::ShaderModule module_ ) - { - module = module_; - return *this; - } - - PipelineShaderStageCreateInfo & setPName( const char* pName_ ) - { - pName = pName_; - return *this; - } - - PipelineShaderStageCreateInfo & setPSpecializationInfo( const vk::SpecializationInfo* pSpecializationInfo_ ) - { - pSpecializationInfo = pSpecializationInfo_; - return *this; - } - - operator VkPipelineShaderStageCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineShaderStageCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineShaderStageCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stage == rhs.stage ) - && ( module == rhs.module ) - && ( pName == rhs.pName ) - && ( pSpecializationInfo == rhs.pSpecializationInfo ); - } - - bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineShaderStageCreateInfo::sType; - }; - 
static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ComputePipelineCreateInfo - { - protected: - ComputePipelineCreateInfo( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(), - vk::PipelineShaderStageCreateInfo stage_ = vk::PipelineShaderStageCreateInfo(), - vk::PipelineLayout layout_ = vk::PipelineLayout(), - vk::Pipeline basePipelineHandle_ = vk::Pipeline(), - int32_t basePipelineIndex_ = 0 ) - : flags( flags_ ) - , stage( stage_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) - {} - - ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eComputePipelineCreateInfo; - const void* pNext = nullptr; - vk::PipelineCreateFlags flags; - vk::PipelineShaderStageCreateInfo stage; - vk::PipelineLayout layout; - vk::Pipeline basePipelineHandle; - int32_t basePipelineIndex; - }; - static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct ComputePipelineCreateInfo : public layout::ComputePipelineCreateInfo - { - ComputePipelineCreateInfo( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(), - vk::PipelineShaderStageCreateInfo stage_ = vk::PipelineShaderStageCreateInfo(), - vk::PipelineLayout layout_ = vk::PipelineLayout(), - vk::Pipeline basePipelineHandle_ = vk::Pipeline(), - int32_t basePipelineIndex_ = 0 ) - : layout::ComputePipelineCreateInfo( flags_, stage_, layout_, basePipelineHandle_, basePipelineIndex_ ) - {} - - ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) - : layout::ComputePipelineCreateInfo( rhs ) - {} - - ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ComputePipelineCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ComputePipelineCreateInfo & setFlags( vk::PipelineCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - ComputePipelineCreateInfo & setStage( vk::PipelineShaderStageCreateInfo stage_ ) - { - stage = stage_; - return *this; - } - - ComputePipelineCreateInfo & setLayout( vk::PipelineLayout layout_ ) - { - layout = layout_; - return *this; - } - - ComputePipelineCreateInfo & setBasePipelineHandle( vk::Pipeline basePipelineHandle_ ) - { - basePipelineHandle = basePipelineHandle_; - return *this; - } - - ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) - { - basePipelineIndex = basePipelineIndex_; - return *this; - } - - operator VkComputePipelineCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkComputePipelineCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ComputePipelineCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stage == rhs.stage ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); - } - - bool 
operator!=( ComputePipelineCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ComputePipelineCreateInfo::sType; - }; - static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ConditionalRenderingBeginInfoEXT - { - protected: - ConditionalRenderingBeginInfoEXT( vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceSize offset_ = 0, - vk::ConditionalRenderingFlagsEXT flags_ = vk::ConditionalRenderingFlagsEXT() ) - : buffer( buffer_ ) - , offset( offset_ ) - , flags( flags_ ) - {} - - ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; - const void* pNext = nullptr; - vk::Buffer buffer; - vk::DeviceSize offset; - vk::ConditionalRenderingFlagsEXT flags; - }; - static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct ConditionalRenderingBeginInfoEXT : public layout::ConditionalRenderingBeginInfoEXT - { - ConditionalRenderingBeginInfoEXT( vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceSize offset_ = 0, - vk::ConditionalRenderingFlagsEXT flags_ = vk::ConditionalRenderingFlagsEXT() ) - : layout::ConditionalRenderingBeginInfoEXT( buffer_, offset_, flags_ ) - {} - - ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) - : layout::ConditionalRenderingBeginInfoEXT( rhs ) - {} - - ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setOffset( vk::DeviceSize offset_ ) - { - offset = offset_; - return *this; - } - - ConditionalRenderingBeginInfoEXT & setFlags( vk::ConditionalRenderingFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - operator VkConditionalRenderingBeginInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkConditionalRenderingBeginInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( flags == rhs.flags ); - } - - bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ConditionalRenderingBeginInfoEXT::sType; - }; - static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ConformanceVersionKHR - { - ConformanceVersionKHR( uint8_t major_ = 0, - uint8_t minor_ = 0, - uint8_t subminor_ = 0, - uint8_t patch_ = 0 ) - : major( major_ ) - , minor( minor_ ) - , subminor( subminor_ ) - , patch( patch_ ) - {} - - ConformanceVersionKHR( VkConformanceVersionKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ConformanceVersionKHR& operator=( VkConformanceVersionKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ConformanceVersionKHR & setMajor( uint8_t major_ ) - { - major = major_; - return *this; - } - - ConformanceVersionKHR & setMinor( uint8_t minor_ ) - { - minor = minor_; - return *this; - } - - ConformanceVersionKHR & setSubminor( uint8_t subminor_ ) - { - subminor = subminor_; - return *this; - } - - ConformanceVersionKHR & setPatch( uint8_t patch_ ) - { - patch = patch_; - return *this; - } - - operator VkConformanceVersionKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkConformanceVersionKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ConformanceVersionKHR const& rhs ) const - { - return ( major == rhs.major ) - && ( minor == rhs.minor ) - && ( subminor == rhs.subminor ) - && ( patch == rhs.patch ); - } - - bool operator!=( ConformanceVersionKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint8_t major; - uint8_t minor; - uint8_t subminor; - uint8_t patch; - }; - static_assert( sizeof( ConformanceVersionKHR ) == sizeof( VkConformanceVersionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct CooperativeMatrixPropertiesNV - { - protected: - CooperativeMatrixPropertiesNV( uint32_t MSize_ = 0, - uint32_t NSize_ = 0, - uint32_t KSize_ = 0, - vk::ComponentTypeNV AType_ = vk::ComponentTypeNV::eFloat16, - vk::ComponentTypeNV BType_ = vk::ComponentTypeNV::eFloat16, - vk::ComponentTypeNV CType_ = vk::ComponentTypeNV::eFloat16, - vk::ComponentTypeNV DType_ = vk::ComponentTypeNV::eFloat16, - vk::ScopeNV scope_ = vk::ScopeNV::eDevice ) - : MSize( MSize_ ) - , NSize( NSize_ ) - , KSize( KSize_ ) - , AType( AType_ ) - , BType( BType_ ) - , CType( CType_ ) - , DType( DType_ ) - , scope( scope_ ) - {} - - CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; - void* pNext = nullptr; - uint32_t MSize; - uint32_t NSize; - uint32_t KSize; - vk::ComponentTypeNV AType; - vk::ComponentTypeNV BType; - vk::ComponentTypeNV CType; - vk::ComponentTypeNV DType; - vk::ScopeNV scope; - }; - static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "layout struct and wrapper have different size!" 
); - } - - struct CooperativeMatrixPropertiesNV : public layout::CooperativeMatrixPropertiesNV - { - CooperativeMatrixPropertiesNV( uint32_t MSize_ = 0, - uint32_t NSize_ = 0, - uint32_t KSize_ = 0, - vk::ComponentTypeNV AType_ = vk::ComponentTypeNV::eFloat16, - vk::ComponentTypeNV BType_ = vk::ComponentTypeNV::eFloat16, - vk::ComponentTypeNV CType_ = vk::ComponentTypeNV::eFloat16, - vk::ComponentTypeNV DType_ = vk::ComponentTypeNV::eFloat16, - vk::ScopeNV scope_ = vk::ScopeNV::eDevice ) - : layout::CooperativeMatrixPropertiesNV( MSize_, NSize_, KSize_, AType_, BType_, CType_, DType_, scope_ ) - {} - - CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) - : layout::CooperativeMatrixPropertiesNV( rhs ) - {} - - CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) - { - MSize = MSize_; - return *this; - } - - CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) - { - NSize = NSize_; - return *this; - } - - CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) - { - KSize = KSize_; - return *this; - } - - CooperativeMatrixPropertiesNV & setAType( vk::ComponentTypeNV AType_ ) - { - AType = AType_; - return *this; - } - - CooperativeMatrixPropertiesNV & setBType( vk::ComponentTypeNV BType_ ) - { - BType = BType_; - return *this; - } - - CooperativeMatrixPropertiesNV & setCType( vk::ComponentTypeNV CType_ ) - { - CType = CType_; - return *this; - } - - CooperativeMatrixPropertiesNV & setDType( vk::ComponentTypeNV DType_ ) - { - DType = DType_; - return *this; - } - - CooperativeMatrixPropertiesNV & setScope( vk::ScopeNV scope_ ) - { - scope = scope_; - return *this; - } - - operator VkCooperativeMatrixPropertiesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCooperativeMatrixPropertiesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( MSize == rhs.MSize ) - && ( NSize == rhs.NSize ) - && ( KSize == rhs.KSize ) - && ( AType == rhs.AType ) - && ( BType == rhs.BType ) - && ( CType == rhs.CType ) - && ( DType == rhs.DType ) - && ( scope == rhs.scope ); - } - - bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::CooperativeMatrixPropertiesNV::sType; - }; - static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct CopyDescriptorSet - { - protected: - CopyDescriptorSet( vk::DescriptorSet srcSet_ = vk::DescriptorSet(), - uint32_t srcBinding_ = 0, - uint32_t srcArrayElement_ = 0, - vk::DescriptorSet dstSet_ = vk::DescriptorSet(), - uint32_t dstBinding_ = 0, - uint32_t dstArrayElement_ = 0, - uint32_t descriptorCount_ = 0 ) - : srcSet( srcSet_ ) - , srcBinding( srcBinding_ ) - , srcArrayElement( srcArrayElement_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - {} - - CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eCopyDescriptorSet; - const void* pNext = nullptr; - vk::DescriptorSet srcSet; - uint32_t srcBinding; - uint32_t srcArrayElement; - vk::DescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - }; - static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "layout struct and wrapper have different size!" ); - } - - struct CopyDescriptorSet : public layout::CopyDescriptorSet - { - CopyDescriptorSet( vk::DescriptorSet srcSet_ = vk::DescriptorSet(), - uint32_t srcBinding_ = 0, - uint32_t srcArrayElement_ = 0, - vk::DescriptorSet dstSet_ = vk::DescriptorSet(), - uint32_t dstBinding_ = 0, - uint32_t dstArrayElement_ = 0, - uint32_t descriptorCount_ = 0 ) - : layout::CopyDescriptorSet( srcSet_, srcBinding_, srcArrayElement_, dstSet_, dstBinding_, dstArrayElement_, descriptorCount_ ) - {} - - CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) - : layout::CopyDescriptorSet( rhs ) - {} - - CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - CopyDescriptorSet & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - CopyDescriptorSet & setSrcSet( vk::DescriptorSet srcSet_ ) - { - srcSet = srcSet_; - return *this; - } - - CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) - { - srcBinding = srcBinding_; - return *this; - } - - CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) - { - srcArrayElement = srcArrayElement_; - return *this; - } - - CopyDescriptorSet & setDstSet( vk::DescriptorSet dstSet_ ) - { - dstSet = dstSet_; - return *this; - } - - CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) - { - dstBinding = dstBinding_; - return *this; - } - - CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) - { - dstArrayElement = dstArrayElement_; - return *this; - } - - CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) - { - descriptorCount = descriptorCount_; - return *this; - } - - operator VkCopyDescriptorSet const&() const - { - return *reinterpret_cast( this ); - } - - operator VkCopyDescriptorSet &() - { - return *reinterpret_cast( this ); - } - - bool operator==( CopyDescriptorSet const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcSet == rhs.srcSet ) - && ( srcBinding == rhs.srcBinding ) - && ( srcArrayElement == rhs.srcArrayElement ) - && ( dstSet == rhs.dstSet ) - && ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ); - } - - bool operator!=( CopyDescriptorSet const& rhs ) const - { - return !operator==( rhs ); - } - 
- private: - using layout::CopyDescriptorSet::sType; - }; - static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct D3D12FenceSubmitInfoKHR - { - protected: - D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0, - const uint64_t* pWaitSemaphoreValues_ = nullptr, - uint32_t signalSemaphoreValuesCount_ = 0, - const uint64_t* pSignalSemaphoreValues_ = nullptr ) - : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) - , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) - {} - - D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; - const void* pNext = nullptr; - uint32_t waitSemaphoreValuesCount; - const uint64_t* pWaitSemaphoreValues; - uint32_t signalSemaphoreValuesCount; - const uint64_t* pSignalSemaphoreValues; - }; - static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct D3D12FenceSubmitInfoKHR : public layout::D3D12FenceSubmitInfoKHR - { - D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0, - const uint64_t* pWaitSemaphoreValues_ = nullptr, - uint32_t signalSemaphoreValuesCount_ = 0, - const uint64_t* pSignalSemaphoreValues_ = nullptr ) - : layout::D3D12FenceSubmitInfoKHR( waitSemaphoreValuesCount_, pWaitSemaphoreValues_, signalSemaphoreValuesCount_, pSignalSemaphoreValues_ ) - {} - - D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) - : layout::D3D12FenceSubmitInfoKHR( rhs ) - {} - - D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) - { - waitSemaphoreValuesCount = waitSemaphoreValuesCount_; - return *this; - } - - D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) - { - pWaitSemaphoreValues = pWaitSemaphoreValues_; - return *this; - } - - D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) - { - signalSemaphoreValuesCount = signalSemaphoreValuesCount_; - return *this; - } - - D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) - { - pSignalSemaphoreValues = pSignalSemaphoreValues_; - return *this; - } - - operator VkD3D12FenceSubmitInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkD3D12FenceSubmitInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) - && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) - && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) - && ( pSignalSemaphoreValues == 
rhs.pSignalSemaphoreValues ); - } - - bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::D3D12FenceSubmitInfoKHR::sType; - }; - static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct DebugMarkerMarkerInfoEXT - { - protected: - DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, - std::array const& color_ = { { 0 } } ) - : pMarkerName( pMarkerName_ ) - { - memcpy( &color, color_.data(), 4 * sizeof( float ) ); - - } - - DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; - const void* pNext = nullptr; - const char* pMarkerName; - float color[4]; - }; - static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DebugMarkerMarkerInfoEXT : public layout::DebugMarkerMarkerInfoEXT - { - DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, - std::array const& color_ = { { 0 } } ) - : layout::DebugMarkerMarkerInfoEXT( pMarkerName_, color_ ) - {} - - DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) - : layout::DebugMarkerMarkerInfoEXT( rhs ) - {} - - DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) - { - pMarkerName = pMarkerName_; - return *this; - } - - DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) - { - memcpy( color, color_.data(), 4 * sizeof( float ) ); - return *this; - } - - operator VkDebugMarkerMarkerInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugMarkerMarkerInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pMarkerName == rhs.pMarkerName ) - && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); - } - - bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugMarkerMarkerInfoEXT::sType; - }; - static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
-  );
-
-  namespace layout
-  {
-    struct DebugMarkerObjectNameInfoEXT
-    {
-    protected:
-      DebugMarkerObjectNameInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown,
-                                    uint64_t object_ = 0,
-                                    const char* pObjectName_ = nullptr )
-        : objectType( objectType_ )
-        , object( object_ )
-        , pObjectName( pObjectName_ )
-      {}
-
-      DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
-      {
-        *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>(this) = rhs;
-      }
-
-      DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
-      {
-        *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
-      const void* pNext = nullptr;
-      vk::DebugReportObjectTypeEXT objectType;
-      uint64_t object;
-      const char* pObjectName;
-    };
-    static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "layout struct and wrapper have different size!" );
-  }
-
-  struct DebugMarkerObjectNameInfoEXT : public layout::DebugMarkerObjectNameInfoEXT
-  {
-    DebugMarkerObjectNameInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown,
-                                  uint64_t object_ = 0,
-                                  const char* pObjectName_ = nullptr )
-      : layout::DebugMarkerObjectNameInfoEXT( objectType_, object_, pObjectName_ )
-    {}
-
-    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
-      : layout::DebugMarkerObjectNameInfoEXT( rhs )
-    {}
-
-    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
-    {
-      *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>(this) = rhs;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT & setObjectType( vk::DebugReportObjectTypeEXT objectType_ )
-    {
-      objectType = objectType_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ )
-    {
-      object = object_;
-      return *this;
-    }
-
-    DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ )
-    {
-      pObjectName = pObjectName_;
-      return *this;
-    }
-
-    operator VkDebugMarkerObjectNameInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
-    }
-
-    operator VkDebugMarkerObjectNameInfoEXT &()
-    {
-      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
-    }
-
-    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( objectType == rhs.objectType )
-          && ( object == rhs.object )
-          && ( pObjectName == rhs.pObjectName );
-    }
-
-    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::DebugMarkerObjectNameInfoEXT::sType;
-  };
-  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct DebugMarkerObjectTagInfoEXT - { - protected: - DebugMarkerObjectTagInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = 0, - uint64_t tagName_ = 0, - size_t tagSize_ = 0, - const void* pTag_ = nullptr ) - : objectType( objectType_ ) - , object( object_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) - {} - - DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; - const void* pNext = nullptr; - vk::DebugReportObjectTypeEXT objectType; - uint64_t object; - uint64_t tagName; - size_t tagSize; - const void* pTag; - }; - static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DebugMarkerObjectTagInfoEXT : public layout::DebugMarkerObjectTagInfoEXT - { - DebugMarkerObjectTagInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = 0, - uint64_t tagName_ = 0, - size_t tagSize_ = 0, - const void* pTag_ = nullptr ) - : layout::DebugMarkerObjectTagInfoEXT( objectType_, object_, tagName_, tagSize_, pTag_ ) - {} - - DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) - : layout::DebugMarkerObjectTagInfoEXT( rhs ) - {} - - DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugMarkerObjectTagInfoEXT & setObjectType( vk::DebugReportObjectTypeEXT objectType_ ) - { - objectType = objectType_; - return *this; - } - - DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) - { - object = object_; - return *this; - } - - DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) - { - tagName = tagName_; - return *this; - } - - DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) - { - tagSize = tagSize_; - return *this; - } - - DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) - { - pTag = pTag_; - return *this; - } - - operator VkDebugMarkerObjectTagInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugMarkerObjectTagInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( object == rhs.object ) - && ( tagName == rhs.tagName ) - && ( tagSize == rhs.tagSize ) - && ( pTag == rhs.pTag ); - } - - bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugMarkerObjectTagInfoEXT::sType; - }; - static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DebugReportCallbackCreateInfoEXT - { - protected: - DebugReportCallbackCreateInfoEXT( vk::DebugReportFlagsEXT flags_ = vk::DebugReportFlagsEXT(), - PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, - void* pUserData_ = nullptr ) - : flags( flags_ ) - , pfnCallback( pfnCallback_ ) - , pUserData( pUserData_ ) - {} - - DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; - const void* pNext = nullptr; - vk::DebugReportFlagsEXT flags; - PFN_vkDebugReportCallbackEXT pfnCallback; - void* pUserData; - }; - static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DebugReportCallbackCreateInfoEXT : public layout::DebugReportCallbackCreateInfoEXT - { - DebugReportCallbackCreateInfoEXT( vk::DebugReportFlagsEXT flags_ = vk::DebugReportFlagsEXT(), - PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, - void* pUserData_ = nullptr ) - : layout::DebugReportCallbackCreateInfoEXT( flags_, pfnCallback_, pUserData_ ) - {} - - DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) - : layout::DebugReportCallbackCreateInfoEXT( rhs ) - {} - - DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugReportCallbackCreateInfoEXT & setFlags( vk::DebugReportFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) - { - pfnCallback = pfnCallback_; - return *this; - } - - DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) - { - pUserData = pUserData_; - return *this; - } - - operator VkDebugReportCallbackCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugReportCallbackCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pfnCallback == rhs.pfnCallback ) - && ( pUserData == rhs.pUserData ); - } - - bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugReportCallbackCreateInfoEXT::sType; - }; - static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DebugUtilsLabelEXT - { - protected: - DebugUtilsLabelEXT( const char* pLabelName_ = nullptr, - std::array const& color_ = { { 0 } } ) - : pLabelName( pLabelName_ ) - { - memcpy( &color, color_.data(), 4 * sizeof( float ) ); - - } - - DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugUtilsLabelEXT; - const void* pNext = nullptr; - const char* pLabelName; - float color[4]; - }; - static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "layout struct and wrapper have different size!" ); - } - - struct DebugUtilsLabelEXT : public layout::DebugUtilsLabelEXT - { - DebugUtilsLabelEXT( const char* pLabelName_ = nullptr, - std::array const& color_ = { { 0 } } ) - : layout::DebugUtilsLabelEXT( pLabelName_, color_ ) - {} - - DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) - : layout::DebugUtilsLabelEXT( rhs ) - {} - - DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugUtilsLabelEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) - { - pLabelName = pLabelName_; - return *this; - } - - DebugUtilsLabelEXT & setColor( std::array color_ ) - { - memcpy( color, color_.data(), 4 * sizeof( float ) ); - return *this; - } - - operator VkDebugUtilsLabelEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugUtilsLabelEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugUtilsLabelEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pLabelName == rhs.pLabelName ) - && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); - } - - bool operator!=( DebugUtilsLabelEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugUtilsLabelEXT::sType; - }; - static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DebugUtilsObjectNameInfoEXT - { - protected: - DebugUtilsObjectNameInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown, - uint64_t objectHandle_ = 0, - const char* pObjectName_ = nullptr ) - : objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , pObjectName( pObjectName_ ) - {} - - DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; - const void* pNext = nullptr; - vk::ObjectType objectType; - uint64_t objectHandle; - const char* pObjectName; - }; - static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct DebugUtilsObjectNameInfoEXT : public layout::DebugUtilsObjectNameInfoEXT - { - DebugUtilsObjectNameInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown, - uint64_t objectHandle_ = 0, - const char* pObjectName_ = nullptr ) - : layout::DebugUtilsObjectNameInfoEXT( objectType_, objectHandle_, pObjectName_ ) - {} - - DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) - : layout::DebugUtilsObjectNameInfoEXT( rhs ) - {} - - DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugUtilsObjectNameInfoEXT & setObjectType( vk::ObjectType objectType_ ) - { - objectType = objectType_; - return *this; - } - - DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) - { - objectHandle = objectHandle_; - return *this; - } - - DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) - { - pObjectName = pObjectName_; - return *this; - } - - operator VkDebugUtilsObjectNameInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugUtilsObjectNameInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( objectHandle == rhs.objectHandle ) - && ( pObjectName == rhs.pObjectName ); - } - - bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugUtilsObjectNameInfoEXT::sType; - }; - static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DebugUtilsMessengerCallbackDataEXT - { - protected: - DebugUtilsMessengerCallbackDataEXT( vk::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = vk::DebugUtilsMessengerCallbackDataFlagsEXT(), - const char* pMessageIdName_ = nullptr, - int32_t messageIdNumber_ = 0, - const char* pMessage_ = nullptr, - uint32_t queueLabelCount_ = 0, - const vk::DebugUtilsLabelEXT* pQueueLabels_ = nullptr, - uint32_t cmdBufLabelCount_ = 0, - const vk::DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr, - uint32_t objectCount_ = 0, - const vk::DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) - : flags( flags_ ) - , pMessageIdName( pMessageIdName_ ) - , messageIdNumber( messageIdNumber_ ) - , pMessage( pMessage_ ) - , queueLabelCount( queueLabelCount_ ) - , pQueueLabels( pQueueLabels_ ) - , cmdBufLabelCount( cmdBufLabelCount_ ) - , pCmdBufLabels( pCmdBufLabels_ ) - , objectCount( objectCount_ ) - , pObjects( pObjects_ ) - {} - - DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; - const void* pNext = nullptr; - vk::DebugUtilsMessengerCallbackDataFlagsEXT flags; - const char* pMessageIdName; - int32_t messageIdNumber; - const char* pMessage; - uint32_t queueLabelCount; - const vk::DebugUtilsLabelEXT* pQueueLabels; - uint32_t cmdBufLabelCount; - const vk::DebugUtilsLabelEXT* pCmdBufLabels; - uint32_t objectCount; - const vk::DebugUtilsObjectNameInfoEXT* pObjects; - }; - static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "layout struct and wrapper have different size!" 
); - } - - struct DebugUtilsMessengerCallbackDataEXT : public layout::DebugUtilsMessengerCallbackDataEXT - { - DebugUtilsMessengerCallbackDataEXT( vk::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = vk::DebugUtilsMessengerCallbackDataFlagsEXT(), - const char* pMessageIdName_ = nullptr, - int32_t messageIdNumber_ = 0, - const char* pMessage_ = nullptr, - uint32_t queueLabelCount_ = 0, - const vk::DebugUtilsLabelEXT* pQueueLabels_ = nullptr, - uint32_t cmdBufLabelCount_ = 0, - const vk::DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr, - uint32_t objectCount_ = 0, - const vk::DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) - : layout::DebugUtilsMessengerCallbackDataEXT( flags_, pMessageIdName_, messageIdNumber_, pMessage_, queueLabelCount_, pQueueLabels_, cmdBufLabelCount_, pCmdBufLabels_, objectCount_, pObjects_ ) - {} - - DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) - : layout::DebugUtilsMessengerCallbackDataEXT( rhs ) - {} - - DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setFlags( vk::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) - { - pMessageIdName = pMessageIdName_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) - { - messageIdNumber = messageIdNumber_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) - { - pMessage = pMessage_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) - { - queueLabelCount = queueLabelCount_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const vk::DebugUtilsLabelEXT* pQueueLabels_ ) - { - pQueueLabels = pQueueLabels_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) - { - cmdBufLabelCount = cmdBufLabelCount_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const vk::DebugUtilsLabelEXT* pCmdBufLabels_ ) - { - pCmdBufLabels = pCmdBufLabels_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) - { - objectCount = objectCount_; - return *this; - } - - DebugUtilsMessengerCallbackDataEXT & setPObjects( const vk::DebugUtilsObjectNameInfoEXT* pObjects_ ) - { - pObjects = pObjects_; - return *this; - } - - operator VkDebugUtilsMessengerCallbackDataEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugUtilsMessengerCallbackDataEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pMessageIdName == rhs.pMessageIdName ) - && ( messageIdNumber == rhs.messageIdNumber ) - && ( pMessage == rhs.pMessage ) - && ( queueLabelCount == rhs.queueLabelCount ) - && ( pQueueLabels == rhs.pQueueLabels ) - && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) - && ( pCmdBufLabels == rhs.pCmdBufLabels ) - && ( objectCount == rhs.objectCount ) - && ( pObjects == rhs.pObjects ); - } - - bool operator!=( 
DebugUtilsMessengerCallbackDataEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugUtilsMessengerCallbackDataEXT::sType; - }; - static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DebugUtilsMessengerCreateInfoEXT - { - protected: - DebugUtilsMessengerCreateInfoEXT( vk::DebugUtilsMessengerCreateFlagsEXT flags_ = vk::DebugUtilsMessengerCreateFlagsEXT(), - vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = vk::DebugUtilsMessageSeverityFlagsEXT(), - vk::DebugUtilsMessageTypeFlagsEXT messageType_ = vk::DebugUtilsMessageTypeFlagsEXT(), - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr, - void* pUserData_ = nullptr ) - : flags( flags_ ) - , messageSeverity( messageSeverity_ ) - , messageType( messageType_ ) - , pfnUserCallback( pfnUserCallback_ ) - , pUserData( pUserData_ ) - {} - - DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; - const void* pNext = nullptr; - vk::DebugUtilsMessengerCreateFlagsEXT flags; - vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity; - vk::DebugUtilsMessageTypeFlagsEXT messageType; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; - void* pUserData; - }; - static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct DebugUtilsMessengerCreateInfoEXT : public layout::DebugUtilsMessengerCreateInfoEXT - { - DebugUtilsMessengerCreateInfoEXT( vk::DebugUtilsMessengerCreateFlagsEXT flags_ = vk::DebugUtilsMessengerCreateFlagsEXT(), - vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = vk::DebugUtilsMessageSeverityFlagsEXT(), - vk::DebugUtilsMessageTypeFlagsEXT messageType_ = vk::DebugUtilsMessageTypeFlagsEXT(), - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr, - void* pUserData_ = nullptr ) - : layout::DebugUtilsMessengerCreateInfoEXT( flags_, messageSeverity_, messageType_, pfnUserCallback_, pUserData_ ) - {} - - DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) - : layout::DebugUtilsMessengerCreateInfoEXT( rhs ) - {} - - DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugUtilsMessengerCreateInfoEXT & setFlags( vk::DebugUtilsMessengerCreateFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) - { - messageSeverity = messageSeverity_; - return *this; - } - - DebugUtilsMessengerCreateInfoEXT & setMessageType( vk::DebugUtilsMessageTypeFlagsEXT messageType_ ) - { - messageType = messageType_; - return *this; - } - - DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) - { - pfnUserCallback = pfnUserCallback_; - return *this; - } - - DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) - { - pUserData = pUserData_; - return *this; - } - - operator VkDebugUtilsMessengerCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugUtilsMessengerCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( messageSeverity == rhs.messageSeverity ) - && ( messageType == rhs.messageType ) - && ( pfnUserCallback == rhs.pfnUserCallback ) - && ( pUserData == rhs.pUserData ); - } - - bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugUtilsMessengerCreateInfoEXT::sType; - }; - static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DebugUtilsObjectTagInfoEXT - { - protected: - DebugUtilsObjectTagInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown, - uint64_t objectHandle_ = 0, - uint64_t tagName_ = 0, - size_t tagSize_ = 0, - const void* pTag_ = nullptr ) - : objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) - {} - - DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; - const void* pNext = nullptr; - vk::ObjectType objectType; - uint64_t objectHandle; - uint64_t tagName; - size_t tagSize; - const void* pTag; - }; - static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DebugUtilsObjectTagInfoEXT : public layout::DebugUtilsObjectTagInfoEXT - { - DebugUtilsObjectTagInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown, - uint64_t objectHandle_ = 0, - uint64_t tagName_ = 0, - size_t tagSize_ = 0, - const void* pTag_ = nullptr ) - : layout::DebugUtilsObjectTagInfoEXT( objectType_, objectHandle_, tagName_, tagSize_, pTag_ ) - {} - - DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) - : layout::DebugUtilsObjectTagInfoEXT( rhs ) - {} - - DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DebugUtilsObjectTagInfoEXT & setObjectType( vk::ObjectType objectType_ ) - { - objectType = objectType_; - return *this; - } - - DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) - { - objectHandle = objectHandle_; - return *this; - } - - DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) - { - tagName = tagName_; - return *this; - } - - DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) - { - tagSize = tagSize_; - return *this; - } - - DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) - { - pTag = pTag_; - return *this; - } - - operator VkDebugUtilsObjectTagInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDebugUtilsObjectTagInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectType == rhs.objectType ) - && ( objectHandle == rhs.objectHandle ) - && ( tagName == rhs.tagName ) - && ( tagSize == rhs.tagSize ) - && ( pTag == rhs.pTag ); - } - - bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DebugUtilsObjectTagInfoEXT::sType; - }; - static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  namespace layout
-  {
-    struct DedicatedAllocationBufferCreateInfoNV
-    {
-    protected:
-      DedicatedAllocationBufferCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 )
-        : dedicatedAllocation( dedicatedAllocation_ )
-      {}
-
-      DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
-      {
-        *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>(this) = rhs;
-      }
-
-      DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
-      {
-        *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
-      const void* pNext = nullptr;
-      vk::Bool32 dedicatedAllocation;
-    };
-    static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "layout struct and wrapper have different size!" );
-  }
-
-  struct DedicatedAllocationBufferCreateInfoNV : public layout::DedicatedAllocationBufferCreateInfoNV
-  {
-    DedicatedAllocationBufferCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 )
-      : layout::DedicatedAllocationBufferCreateInfoNV( dedicatedAllocation_ )
-    {}
-
-    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
-      : layout::DedicatedAllocationBufferCreateInfoNV( rhs )
-    {}
-
-    DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
-    {
-      *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>(this) = rhs;
-      return *this;
-    }
-
-    DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( vk::Bool32 dedicatedAllocation_ )
-    {
-      dedicatedAllocation = dedicatedAllocation_;
-      return *this;
-    }
-
-    operator VkDedicatedAllocationBufferCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
-    }
-
-    operator VkDedicatedAllocationBufferCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
-    }
-
-    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dedicatedAllocation == rhs.dedicatedAllocation );
-    }
-
-    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::DedicatedAllocationBufferCreateInfoNV::sType;
-  };
-  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct DedicatedAllocationImageCreateInfoNV
-    {
-    protected:
-      DedicatedAllocationImageCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 )
-        : dedicatedAllocation( dedicatedAllocation_ )
-      {}
-
-      DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
-      {
-        *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>(this) = rhs;
-      }
-
-      DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
-      {
-        *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
-      const void* pNext = nullptr;
-      vk::Bool32 dedicatedAllocation;
-    };
-    static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "layout struct and wrapper have different size!"
); - } - - struct DedicatedAllocationImageCreateInfoNV : public layout::DedicatedAllocationImageCreateInfoNV - { - DedicatedAllocationImageCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 ) - : layout::DedicatedAllocationImageCreateInfoNV( dedicatedAllocation_ ) - {} - - DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) - : layout::DedicatedAllocationImageCreateInfoNV( rhs ) - {} - - DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( vk::Bool32 dedicatedAllocation_ ) - { - dedicatedAllocation = dedicatedAllocation_; - return *this; - } - - operator VkDedicatedAllocationImageCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDedicatedAllocationImageCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dedicatedAllocation == rhs.dedicatedAllocation ); - } - - bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DedicatedAllocationImageCreateInfoNV::sType; - }; - static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DedicatedAllocationMemoryAllocateInfoNV - { - protected: - DedicatedAllocationMemoryAllocateInfoNV( vk::Image image_ = vk::Image(), - vk::Buffer buffer_ = vk::Buffer() ) - : image( image_ ) - , buffer( buffer_ ) - {} - - DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; - const void* pNext = nullptr; - vk::Image image; - vk::Buffer buffer; - }; - static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct DedicatedAllocationMemoryAllocateInfoNV : public layout::DedicatedAllocationMemoryAllocateInfoNV - { - DedicatedAllocationMemoryAllocateInfoNV( vk::Image image_ = vk::Image(), - vk::Buffer buffer_ = vk::Buffer() ) - : layout::DedicatedAllocationMemoryAllocateInfoNV( image_, buffer_ ) - {} - - DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) - : layout::DedicatedAllocationMemoryAllocateInfoNV( rhs ) - {} - - DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DedicatedAllocationMemoryAllocateInfoNV & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - DedicatedAllocationMemoryAllocateInfoNV & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDedicatedAllocationMemoryAllocateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DedicatedAllocationMemoryAllocateInfoNV::sType; - }; - static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DescriptorBufferInfo - { - DescriptorBufferInfo( vk::Buffer buffer_ = vk::Buffer(), - vk::DeviceSize offset_ = 0, - vk::DeviceSize range_ = 0 ) - : buffer( buffer_ ) - , offset( offset_ ) - , range( range_ ) - {} - - DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorBufferInfo & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - DescriptorBufferInfo & setOffset( vk::DeviceSize offset_ ) - { - offset = offset_; - return *this; - } - - DescriptorBufferInfo & setRange( vk::DeviceSize range_ ) - { - range = range_; - return *this; - } - - operator VkDescriptorBufferInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorBufferInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorBufferInfo const& rhs ) const - { - return ( buffer == rhs.buffer ) - && ( offset == rhs.offset ) - && ( range == rhs.range ); - } - - bool operator!=( DescriptorBufferInfo const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Buffer buffer; - vk::DeviceSize offset; - vk::DeviceSize range; - }; - static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DescriptorImageInfo - { - DescriptorImageInfo( vk::Sampler sampler_ = vk::Sampler(), - vk::ImageView imageView_ = vk::ImageView(), - vk::ImageLayout imageLayout_ = vk::ImageLayout::eUndefined ) - : sampler( sampler_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - {} - - DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorImageInfo & setSampler( vk::Sampler sampler_ ) - { - sampler = sampler_; - return *this; - } - - DescriptorImageInfo & setImageView( vk::ImageView imageView_ ) - { - imageView = imageView_; - return *this; - } - - DescriptorImageInfo & setImageLayout( vk::ImageLayout imageLayout_ ) - { - imageLayout = imageLayout_; - return *this; - } - - operator VkDescriptorImageInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorImageInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorImageInfo const& rhs ) const - { - return ( sampler == rhs.sampler ) - && ( imageView == rhs.imageView ) - && ( imageLayout == rhs.imageLayout ); - } - - bool operator!=( DescriptorImageInfo const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Sampler sampler; - vk::ImageView imageView; - vk::ImageLayout imageLayout; - }; - static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DescriptorPoolSize - { - DescriptorPoolSize( vk::DescriptorType type_ = vk::DescriptorType::eSampler, - uint32_t descriptorCount_ = 0 ) - : type( type_ ) - , descriptorCount( descriptorCount_ ) - {} - - DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorPoolSize & setType( vk::DescriptorType type_ ) - { - type = type_; - return *this; - } - - DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) - { - descriptorCount = descriptorCount_; - return *this; - } - - operator VkDescriptorPoolSize const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorPoolSize &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorPoolSize const& rhs ) const - { - return ( type == rhs.type ) - && ( descriptorCount == rhs.descriptorCount ); - } - - bool operator!=( DescriptorPoolSize const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DescriptorType type; - uint32_t descriptorCount; - }; - static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DescriptorPoolCreateInfo - { - protected: - DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlags flags_ = vk::DescriptorPoolCreateFlags(), - uint32_t maxSets_ = 0, - uint32_t poolSizeCount_ = 0, - const vk::DescriptorPoolSize* pPoolSizes_ = nullptr ) - : flags( flags_ ) - , maxSets( maxSets_ ) - , poolSizeCount( poolSizeCount_ ) - , pPoolSizes( pPoolSizes_ ) - {} - - DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorPoolCreateInfo; - const void* pNext = nullptr; - vk::DescriptorPoolCreateFlags flags; - uint32_t maxSets; - uint32_t poolSizeCount; - const vk::DescriptorPoolSize* pPoolSizes; - }; - static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct DescriptorPoolCreateInfo : public layout::DescriptorPoolCreateInfo - { - DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlags flags_ = vk::DescriptorPoolCreateFlags(), - uint32_t maxSets_ = 0, - uint32_t poolSizeCount_ = 0, - const vk::DescriptorPoolSize* pPoolSizes_ = nullptr ) - : layout::DescriptorPoolCreateInfo( flags_, maxSets_, poolSizeCount_, pPoolSizes_ ) - {} - - DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) - : layout::DescriptorPoolCreateInfo( rhs ) - {} - - DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorPoolCreateInfo & setFlags( vk::DescriptorPoolCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) - { - maxSets = maxSets_; - return *this; - } - - DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) - { - poolSizeCount = poolSizeCount_; - return *this; - } - - DescriptorPoolCreateInfo & setPPoolSizes( const vk::DescriptorPoolSize* pPoolSizes_ ) - { - pPoolSizes = pPoolSizes_; - return *this; - } - - operator VkDescriptorPoolCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorPoolCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorPoolCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( maxSets == rhs.maxSets ) - && ( poolSizeCount == rhs.poolSizeCount ) - && ( pPoolSizes == rhs.pPoolSizes ); - } - - bool operator!=( DescriptorPoolCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorPoolCreateInfo::sType; - }; - static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DescriptorPoolInlineUniformBlockCreateInfoEXT - { - protected: - DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 ) - : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) - {} - - DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT; - const void* pNext = nullptr; - uint32_t maxInlineUniformBlockBindings; - }; - static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DescriptorPoolInlineUniformBlockCreateInfoEXT : public layout::DescriptorPoolInlineUniformBlockCreateInfoEXT - { - DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 ) - : layout::DescriptorPoolInlineUniformBlockCreateInfoEXT( maxInlineUniformBlockBindings_ ) - {} - - DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) - : layout::DescriptorPoolInlineUniformBlockCreateInfoEXT( rhs ) - {} - - DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) - { - maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; - return *this; - } - - operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); - } - - bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorPoolInlineUniformBlockCreateInfoEXT::sType; - }; - static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DescriptorSetAllocateInfo - { - protected: - DescriptorSetAllocateInfo( vk::DescriptorPool descriptorPool_ = vk::DescriptorPool(), - uint32_t descriptorSetCount_ = 0, - const vk::DescriptorSetLayout* pSetLayouts_ = nullptr ) - : descriptorPool( descriptorPool_ ) - , descriptorSetCount( descriptorSetCount_ ) - , pSetLayouts( pSetLayouts_ ) - {} - - DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorSetAllocateInfo; - const void* pNext = nullptr; - vk::DescriptorPool descriptorPool; - uint32_t descriptorSetCount; - const vk::DescriptorSetLayout* pSetLayouts; - }; - static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "layout struct and wrapper have different size!" ); - } - - struct DescriptorSetAllocateInfo : public layout::DescriptorSetAllocateInfo - { - DescriptorSetAllocateInfo( vk::DescriptorPool descriptorPool_ = vk::DescriptorPool(), - uint32_t descriptorSetCount_ = 0, - const vk::DescriptorSetLayout* pSetLayouts_ = nullptr ) - : layout::DescriptorSetAllocateInfo( descriptorPool_, descriptorSetCount_, pSetLayouts_ ) - {} - - DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) - : layout::DescriptorSetAllocateInfo( rhs ) - {} - - DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorSetAllocateInfo & setDescriptorPool( vk::DescriptorPool descriptorPool_ ) - { - descriptorPool = descriptorPool_; - return *this; - } - - DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) - { - descriptorSetCount = descriptorSetCount_; - return *this; - } - - DescriptorSetAllocateInfo & setPSetLayouts( const vk::DescriptorSetLayout* pSetLayouts_ ) - { - pSetLayouts = pSetLayouts_; - return *this; - } - - operator VkDescriptorSetAllocateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorSetAllocateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorSetAllocateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( descriptorPool == rhs.descriptorPool ) - && ( descriptorSetCount == rhs.descriptorSetCount ) - && ( pSetLayouts == rhs.pSetLayouts ); - } - - bool operator!=( DescriptorSetAllocateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorSetAllocateInfo::sType; - }; - static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DescriptorSetLayoutBinding - { - DescriptorSetLayoutBinding( uint32_t binding_ = 0, - vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler, - uint32_t descriptorCount_ = 0, - vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags(), - const vk::Sampler* pImmutableSamplers_ = nullptr ) - : binding( binding_ ) - , descriptorType( descriptorType_ ) - , descriptorCount( descriptorCount_ ) - , stageFlags( stageFlags_ ) - , pImmutableSamplers( pImmutableSamplers_ ) - {} - - DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) - { - binding = binding_; - return *this; - } - - DescriptorSetLayoutBinding & setDescriptorType( vk::DescriptorType descriptorType_ ) - { - descriptorType = descriptorType_; - return *this; - } - - DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) - { - descriptorCount = descriptorCount_; - return *this; - } - - DescriptorSetLayoutBinding & setStageFlags( vk::ShaderStageFlags stageFlags_ ) - { - stageFlags = stageFlags_; - return *this; - } - - DescriptorSetLayoutBinding & setPImmutableSamplers( const vk::Sampler* pImmutableSamplers_ ) - { - pImmutableSamplers = pImmutableSamplers_; - return *this; - } - - operator VkDescriptorSetLayoutBinding const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorSetLayoutBinding &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorSetLayoutBinding const& rhs ) const - { - return ( binding == rhs.binding ) - && ( descriptorType == rhs.descriptorType ) - && ( descriptorCount == rhs.descriptorCount ) - && ( stageFlags == rhs.stageFlags ) - && ( pImmutableSamplers == rhs.pImmutableSamplers ); - } - - bool operator!=( DescriptorSetLayoutBinding const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t binding; - vk::DescriptorType descriptorType; - uint32_t descriptorCount; - vk::ShaderStageFlags stageFlags; - const vk::Sampler* pImmutableSamplers; - }; - static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DescriptorSetLayoutBindingFlagsCreateInfoEXT - { - protected: - DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0, - const vk::DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) - : bindingCount( bindingCount_ ) - , pBindingFlags( pBindingFlags_ ) - {} - - DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT; - const void* pNext = nullptr; - uint32_t bindingCount; - const vk::DescriptorBindingFlagsEXT* pBindingFlags; - }; - static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct DescriptorSetLayoutBindingFlagsCreateInfoEXT : public layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT - { - DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0, - const vk::DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) - : layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT( bindingCount_, pBindingFlags_ ) - {} - - DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) - : layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT( rhs ) - {} - - DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorSetLayoutBindingFlagsCreateInfoEXT & setBindingCount( uint32_t bindingCount_ ) - { - bindingCount = bindingCount_; - return *this; - } - - DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPBindingFlags( const vk::DescriptorBindingFlagsEXT* pBindingFlags_ ) - { - pBindingFlags = pBindingFlags_; - return *this; - } - - operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( bindingCount == rhs.bindingCount ) - && ( pBindingFlags == rhs.pBindingFlags ); - } - - bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT::sType; - }; - static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DescriptorSetLayoutCreateInfo - { - protected: - DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags flags_ = vk::DescriptorSetLayoutCreateFlags(), - uint32_t bindingCount_ = 0, - const vk::DescriptorSetLayoutBinding* pBindings_ = nullptr ) - : flags( flags_ ) - , bindingCount( bindingCount_ ) - , pBindings( pBindings_ ) - {} - - DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; - const void* pNext = nullptr; - vk::DescriptorSetLayoutCreateFlags flags; - uint32_t bindingCount; - const vk::DescriptorSetLayoutBinding* pBindings; - }; - static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct DescriptorSetLayoutCreateInfo : public layout::DescriptorSetLayoutCreateInfo - { - DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags flags_ = vk::DescriptorSetLayoutCreateFlags(), - uint32_t bindingCount_ = 0, - const vk::DescriptorSetLayoutBinding* pBindings_ = nullptr ) - : layout::DescriptorSetLayoutCreateInfo( flags_, bindingCount_, pBindings_ ) - {} - - DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) - : layout::DescriptorSetLayoutCreateInfo( rhs ) - {} - - DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorSetLayoutCreateInfo & setFlags( vk::DescriptorSetLayoutCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) - { - bindingCount = bindingCount_; - return *this; - } - - DescriptorSetLayoutCreateInfo & setPBindings( const vk::DescriptorSetLayoutBinding* pBindings_ ) - { - pBindings = pBindings_; - return *this; - } - - operator VkDescriptorSetLayoutCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorSetLayoutCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( bindingCount == rhs.bindingCount ) - && ( pBindings == rhs.pBindings ); - } - - bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorSetLayoutCreateInfo::sType; - }; - static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DescriptorSetLayoutSupport - { - protected: - DescriptorSetLayoutSupport( vk::Bool32 supported_ = 0 ) - : supported( supported_ ) - {} - - DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorSetLayoutSupport; - void* pNext = nullptr; - vk::Bool32 supported; - }; - static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "layout struct and wrapper have different size!" ); - } - - struct DescriptorSetLayoutSupport : public layout::DescriptorSetLayoutSupport - { - operator VkDescriptorSetLayoutSupport const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorSetLayoutSupport &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorSetLayoutSupport const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supported == rhs.supported ); - } - - bool operator!=( DescriptorSetLayoutSupport const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorSetLayoutSupport::sType; - }; - static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DescriptorSetVariableDescriptorCountAllocateInfoEXT - { - protected: - DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0, - const uint32_t* pDescriptorCounts_ = nullptr ) - : descriptorSetCount( descriptorSetCount_ ) - , pDescriptorCounts( pDescriptorCounts_ ) - {} - - DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT; - const void* pNext = nullptr; - uint32_t descriptorSetCount; - const uint32_t* pDescriptorCounts; - }; - static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DescriptorSetVariableDescriptorCountAllocateInfoEXT : public layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT - { - DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0, - const uint32_t* pDescriptorCounts_ = nullptr ) - : layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT( descriptorSetCount_, pDescriptorCounts_ ) - {} - - DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) - : layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT( rhs ) - {} - - DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorSetVariableDescriptorCountAllocateInfoEXT & setDescriptorSetCount( uint32_t descriptorSetCount_ ) - { - descriptorSetCount = descriptorSetCount_; - return *this; - } - - DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) - { - pDescriptorCounts = pDescriptorCounts_; - return *this; - } - - operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( descriptorSetCount == rhs.descriptorSetCount ) - && ( pDescriptorCounts == rhs.pDescriptorCounts ); - } - - bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT::sType; - }; - static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  namespace layout
-  {
-    struct DescriptorSetVariableDescriptorCountLayoutSupportEXT
-    {
-    protected:
-      DescriptorSetVariableDescriptorCountLayoutSupportEXT( uint32_t maxVariableDescriptorCount_ = 0 )
-        : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
-      {}
-
-      DescriptorSetVariableDescriptorCountLayoutSupportEXT( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs )
-      {
-        *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this) = rhs;
-      }
-
-      DescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs )
-      {
-        *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT;
-      void* pNext = nullptr;
-      uint32_t maxVariableDescriptorCount;
-    };
-    static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "layout struct and wrapper have different size!" );
-  }
-
-  struct DescriptorSetVariableDescriptorCountLayoutSupportEXT : public layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT
-  {
-    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const&() const
-    {
-      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
-    }
-
-    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT &()
-    {
-      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
-    }
-
-    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
-    }
-
-    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT::sType;
-  };
-  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupportEXT>::value, "struct wrapper is not a standard layout!"
); - - struct DescriptorUpdateTemplateEntry - { - DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0, - uint32_t dstArrayElement_ = 0, - uint32_t descriptorCount_ = 0, - vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler, - size_t offset_ = 0, - size_t stride_ = 0 ) - : dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , offset( offset_ ) - , stride( stride_ ) - {} - - DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) - { - dstBinding = dstBinding_; - return *this; - } - - DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) - { - dstArrayElement = dstArrayElement_; - return *this; - } - - DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) - { - descriptorCount = descriptorCount_; - return *this; - } - - DescriptorUpdateTemplateEntry & setDescriptorType( vk::DescriptorType descriptorType_ ) - { - descriptorType = descriptorType_; - return *this; - } - - DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) - { - offset = offset_; - return *this; - } - - DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) - { - stride = stride_; - return *this; - } - - operator VkDescriptorUpdateTemplateEntry const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorUpdateTemplateEntry &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const - { - return ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ) - && ( descriptorType == rhs.descriptorType ) - && ( offset == rhs.offset ) - && ( stride == rhs.stride ); - } - - bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - vk::DescriptorType descriptorType; - size_t offset; - size_t stride; - }; - static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DescriptorUpdateTemplateCreateInfo - { - protected: - DescriptorUpdateTemplateCreateInfo( vk::DescriptorUpdateTemplateCreateFlags flags_ = vk::DescriptorUpdateTemplateCreateFlags(), - uint32_t descriptorUpdateEntryCount_ = 0, - const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr, - vk::DescriptorUpdateTemplateType templateType_ = vk::DescriptorUpdateTemplateType::eDescriptorSet, - vk::DescriptorSetLayout descriptorSetLayout_ = vk::DescriptorSetLayout(), - vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(), - uint32_t set_ = 0 ) - : flags( flags_ ) - , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) - , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) - , templateType( templateType_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipelineLayout( pipelineLayout_ ) - , set( set_ ) - {} - - DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; - const void* pNext = nullptr; - vk::DescriptorUpdateTemplateCreateFlags flags; - uint32_t descriptorUpdateEntryCount; - const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; - vk::DescriptorUpdateTemplateType templateType; - vk::DescriptorSetLayout descriptorSetLayout; - vk::PipelineBindPoint pipelineBindPoint; - vk::PipelineLayout pipelineLayout; - uint32_t set; - }; - static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct DescriptorUpdateTemplateCreateInfo : public layout::DescriptorUpdateTemplateCreateInfo - { - DescriptorUpdateTemplateCreateInfo( vk::DescriptorUpdateTemplateCreateFlags flags_ = vk::DescriptorUpdateTemplateCreateFlags(), - uint32_t descriptorUpdateEntryCount_ = 0, - const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr, - vk::DescriptorUpdateTemplateType templateType_ = vk::DescriptorUpdateTemplateType::eDescriptorSet, - vk::DescriptorSetLayout descriptorSetLayout_ = vk::DescriptorSetLayout(), - vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(), - uint32_t set_ = 0 ) - : layout::DescriptorUpdateTemplateCreateInfo( flags_, descriptorUpdateEntryCount_, pDescriptorUpdateEntries_, templateType_, descriptorSetLayout_, pipelineBindPoint_, pipelineLayout_, set_ ) - {} - - DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) - : layout::DescriptorUpdateTemplateCreateInfo( rhs ) - {} - - DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setFlags( vk::DescriptorUpdateTemplateCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) - { - descriptorUpdateEntryCount = descriptorUpdateEntryCount_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) - { - pDescriptorUpdateEntries = pDescriptorUpdateEntries_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setTemplateType( vk::DescriptorUpdateTemplateType templateType_ ) - { - templateType = templateType_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout_ ) - { - descriptorSetLayout = descriptorSetLayout_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setPipelineLayout( vk::PipelineLayout pipelineLayout_ ) - { - pipelineLayout = pipelineLayout_; - return *this; - } - - DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) - { - set = set_; - return *this; - } - - operator VkDescriptorUpdateTemplateCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDescriptorUpdateTemplateCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) - && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) - && ( templateType == rhs.templateType ) - && ( descriptorSetLayout == rhs.descriptorSetLayout ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( set == rhs.set ); - } - - bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using 
layout::DescriptorUpdateTemplateCreateInfo::sType; - }; - static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceQueueCreateInfo - { - protected: - DeviceQueueCreateInfo( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(), - uint32_t queueFamilyIndex_ = 0, - uint32_t queueCount_ = 0, - const float* pQueuePriorities_ = nullptr ) - : flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueCount( queueCount_ ) - , pQueuePriorities( pQueuePriorities_ ) - {} - - DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceQueueCreateInfo; - const void* pNext = nullptr; - vk::DeviceQueueCreateFlags flags; - uint32_t queueFamilyIndex; - uint32_t queueCount; - const float* pQueuePriorities; - }; - static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct DeviceQueueCreateInfo : public layout::DeviceQueueCreateInfo - { - DeviceQueueCreateInfo( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(), - uint32_t queueFamilyIndex_ = 0, - uint32_t queueCount_ = 0, - const float* pQueuePriorities_ = nullptr ) - : layout::DeviceQueueCreateInfo( flags_, queueFamilyIndex_, queueCount_, pQueuePriorities_ ) - {} - - DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) - : layout::DeviceQueueCreateInfo( rhs ) - {} - - DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceQueueCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceQueueCreateInfo & setFlags( vk::DeviceQueueCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) - { - queueCount = queueCount_; - return *this; - } - - DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) - { - pQueuePriorities = pQueuePriorities_; - return *this; - } - - operator VkDeviceQueueCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceQueueCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueCount == rhs.queueCount ) - && ( pQueuePriorities == rhs.pQueuePriorities ); - } - - bool operator!=( DeviceQueueCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceQueueCreateInfo::sType; - }; - static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceFeatures - { - PhysicalDeviceFeatures( vk::Bool32 robustBufferAccess_ = 0, - vk::Bool32 fullDrawIndexUint32_ = 0, - vk::Bool32 imageCubeArray_ = 0, - vk::Bool32 independentBlend_ = 0, - vk::Bool32 geometryShader_ = 0, - vk::Bool32 tessellationShader_ = 0, - vk::Bool32 sampleRateShading_ = 0, - vk::Bool32 dualSrcBlend_ = 0, - vk::Bool32 logicOp_ = 0, - vk::Bool32 multiDrawIndirect_ = 0, - vk::Bool32 drawIndirectFirstInstance_ = 0, - vk::Bool32 depthClamp_ = 0, - vk::Bool32 depthBiasClamp_ = 0, - vk::Bool32 fillModeNonSolid_ = 0, - vk::Bool32 depthBounds_ = 0, - vk::Bool32 wideLines_ = 0, - vk::Bool32 largePoints_ = 0, - vk::Bool32 alphaToOne_ = 0, - vk::Bool32 multiViewport_ = 0, - vk::Bool32 samplerAnisotropy_ = 0, - vk::Bool32 textureCompressionETC2_ = 0, - vk::Bool32 textureCompressionASTC_LDR_ = 0, - vk::Bool32 textureCompressionBC_ = 0, - vk::Bool32 occlusionQueryPrecise_ = 0, - vk::Bool32 pipelineStatisticsQuery_ = 0, - vk::Bool32 vertexPipelineStoresAndAtomics_ = 0, - vk::Bool32 fragmentStoresAndAtomics_ = 0, - vk::Bool32 shaderTessellationAndGeometryPointSize_ = 0, - vk::Bool32 shaderImageGatherExtended_ = 0, - vk::Bool32 shaderStorageImageExtendedFormats_ = 0, - vk::Bool32 shaderStorageImageMultisample_ = 0, - vk::Bool32 shaderStorageImageReadWithoutFormat_ = 0, - vk::Bool32 shaderStorageImageWriteWithoutFormat_ = 0, - vk::Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, - vk::Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, - vk::Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, - vk::Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, - vk::Bool32 shaderClipDistance_ = 0, - vk::Bool32 shaderCullDistance_ = 0, - vk::Bool32 shaderFloat64_ = 0, - vk::Bool32 shaderInt64_ = 0, - vk::Bool32 shaderInt16_ = 0, - vk::Bool32 shaderResourceResidency_ = 0, - vk::Bool32 shaderResourceMinLod_ = 0, - vk::Bool32 sparseBinding_ = 0, - vk::Bool32 sparseResidencyBuffer_ = 0, - vk::Bool32 sparseResidencyImage2D_ = 0, - vk::Bool32 sparseResidencyImage3D_ = 0, - vk::Bool32 sparseResidency2Samples_ = 0, - vk::Bool32 sparseResidency4Samples_ = 0, - vk::Bool32 sparseResidency8Samples_ = 0, - vk::Bool32 sparseResidency16Samples_ = 0, - vk::Bool32 sparseResidencyAliased_ = 0, - vk::Bool32 variableMultisampleRate_ = 0, - vk::Bool32 inheritedQueries_ = 0 ) - : robustBufferAccess( robustBufferAccess_ ) - , fullDrawIndexUint32( fullDrawIndexUint32_ ) - , imageCubeArray( imageCubeArray_ ) - , independentBlend( independentBlend_ ) - , geometryShader( geometryShader_ ) - , tessellationShader( tessellationShader_ ) - , sampleRateShading( sampleRateShading_ ) - , dualSrcBlend( dualSrcBlend_ ) - , logicOp( logicOp_ ) - , multiDrawIndirect( multiDrawIndirect_ ) - , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) - , depthClamp( depthClamp_ ) - , depthBiasClamp( depthBiasClamp_ ) - , fillModeNonSolid( fillModeNonSolid_ ) - , depthBounds( depthBounds_ ) - , wideLines( wideLines_ ) - , largePoints( largePoints_ ) - , alphaToOne( alphaToOne_ ) - , multiViewport( multiViewport_ ) - , samplerAnisotropy( samplerAnisotropy_ ) - , textureCompressionETC2( textureCompressionETC2_ ) - , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) - , textureCompressionBC( textureCompressionBC_ ) - , occlusionQueryPrecise( occlusionQueryPrecise_ ) - , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) - , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) - , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) - , shaderTessellationAndGeometryPointSize( 
shaderTessellationAndGeometryPointSize_ ) - , shaderImageGatherExtended( shaderImageGatherExtended_ ) - , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) - , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) - , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) - , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) - , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) - , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) - , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) - , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) - , shaderClipDistance( shaderClipDistance_ ) - , shaderCullDistance( shaderCullDistance_ ) - , shaderFloat64( shaderFloat64_ ) - , shaderInt64( shaderInt64_ ) - , shaderInt16( shaderInt16_ ) - , shaderResourceResidency( shaderResourceResidency_ ) - , shaderResourceMinLod( shaderResourceMinLod_ ) - , sparseBinding( sparseBinding_ ) - , sparseResidencyBuffer( sparseResidencyBuffer_ ) - , sparseResidencyImage2D( sparseResidencyImage2D_ ) - , sparseResidencyImage3D( sparseResidencyImage3D_ ) - , sparseResidency2Samples( sparseResidency2Samples_ ) - , sparseResidency4Samples( sparseResidency4Samples_ ) - , sparseResidency8Samples( sparseResidency8Samples_ ) - , sparseResidency16Samples( sparseResidency16Samples_ ) - , sparseResidencyAliased( sparseResidencyAliased_ ) - , variableMultisampleRate( variableMultisampleRate_ ) - , inheritedQueries( inheritedQueries_ ) - {} - - PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceFeatures & setRobustBufferAccess( vk::Bool32 robustBufferAccess_ ) - { - robustBufferAccess = robustBufferAccess_; - return *this; - } - - PhysicalDeviceFeatures & setFullDrawIndexUint32( vk::Bool32 fullDrawIndexUint32_ ) - { - fullDrawIndexUint32 = fullDrawIndexUint32_; - return *this; - } - - PhysicalDeviceFeatures & setImageCubeArray( vk::Bool32 imageCubeArray_ ) - { - imageCubeArray = imageCubeArray_; - return *this; - } - - PhysicalDeviceFeatures & setIndependentBlend( vk::Bool32 independentBlend_ ) - { - independentBlend = independentBlend_; - return *this; - } - - PhysicalDeviceFeatures & setGeometryShader( vk::Bool32 geometryShader_ ) - { - geometryShader = geometryShader_; - return *this; - } - - PhysicalDeviceFeatures & setTessellationShader( vk::Bool32 tessellationShader_ ) - { - tessellationShader = tessellationShader_; - return *this; - } - - PhysicalDeviceFeatures & setSampleRateShading( vk::Bool32 sampleRateShading_ ) - { - sampleRateShading = sampleRateShading_; - return *this; - } - - PhysicalDeviceFeatures & setDualSrcBlend( vk::Bool32 dualSrcBlend_ ) - { - dualSrcBlend = dualSrcBlend_; - return *this; - } - - PhysicalDeviceFeatures & setLogicOp( vk::Bool32 logicOp_ ) - { - logicOp = logicOp_; - return *this; - } - - PhysicalDeviceFeatures & setMultiDrawIndirect( vk::Bool32 multiDrawIndirect_ ) - { - multiDrawIndirect = multiDrawIndirect_; - return *this; - } - - PhysicalDeviceFeatures & setDrawIndirectFirstInstance( vk::Bool32 drawIndirectFirstInstance_ ) - { - drawIndirectFirstInstance = drawIndirectFirstInstance_; - return *this; - } - - PhysicalDeviceFeatures & setDepthClamp( vk::Bool32 depthClamp_ ) - { - 
depthClamp = depthClamp_; - return *this; - } - - PhysicalDeviceFeatures & setDepthBiasClamp( vk::Bool32 depthBiasClamp_ ) - { - depthBiasClamp = depthBiasClamp_; - return *this; - } - - PhysicalDeviceFeatures & setFillModeNonSolid( vk::Bool32 fillModeNonSolid_ ) - { - fillModeNonSolid = fillModeNonSolid_; - return *this; - } - - PhysicalDeviceFeatures & setDepthBounds( vk::Bool32 depthBounds_ ) - { - depthBounds = depthBounds_; - return *this; - } - - PhysicalDeviceFeatures & setWideLines( vk::Bool32 wideLines_ ) - { - wideLines = wideLines_; - return *this; - } - - PhysicalDeviceFeatures & setLargePoints( vk::Bool32 largePoints_ ) - { - largePoints = largePoints_; - return *this; - } - - PhysicalDeviceFeatures & setAlphaToOne( vk::Bool32 alphaToOne_ ) - { - alphaToOne = alphaToOne_; - return *this; - } - - PhysicalDeviceFeatures & setMultiViewport( vk::Bool32 multiViewport_ ) - { - multiViewport = multiViewport_; - return *this; - } - - PhysicalDeviceFeatures & setSamplerAnisotropy( vk::Bool32 samplerAnisotropy_ ) - { - samplerAnisotropy = samplerAnisotropy_; - return *this; - } - - PhysicalDeviceFeatures & setTextureCompressionETC2( vk::Bool32 textureCompressionETC2_ ) - { - textureCompressionETC2 = textureCompressionETC2_; - return *this; - } - - PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( vk::Bool32 textureCompressionASTC_LDR_ ) - { - textureCompressionASTC_LDR = textureCompressionASTC_LDR_; - return *this; - } - - PhysicalDeviceFeatures & setTextureCompressionBC( vk::Bool32 textureCompressionBC_ ) - { - textureCompressionBC = textureCompressionBC_; - return *this; - } - - PhysicalDeviceFeatures & setOcclusionQueryPrecise( vk::Bool32 occlusionQueryPrecise_ ) - { - occlusionQueryPrecise = occlusionQueryPrecise_; - return *this; - } - - PhysicalDeviceFeatures & setPipelineStatisticsQuery( vk::Bool32 pipelineStatisticsQuery_ ) - { - pipelineStatisticsQuery = pipelineStatisticsQuery_; - return *this; - } - - PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( vk::Bool32 vertexPipelineStoresAndAtomics_ ) - { - vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; - return *this; - } - - PhysicalDeviceFeatures & setFragmentStoresAndAtomics( vk::Bool32 fragmentStoresAndAtomics_ ) - { - fragmentStoresAndAtomics = fragmentStoresAndAtomics_; - return *this; - } - - PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( vk::Bool32 shaderTessellationAndGeometryPointSize_ ) - { - shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; - return *this; - } - - PhysicalDeviceFeatures & setShaderImageGatherExtended( vk::Bool32 shaderImageGatherExtended_ ) - { - shaderImageGatherExtended = shaderImageGatherExtended_; - return *this; - } - - PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( vk::Bool32 shaderStorageImageExtendedFormats_ ) - { - shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; - return *this; - } - - PhysicalDeviceFeatures & setShaderStorageImageMultisample( vk::Bool32 shaderStorageImageMultisample_ ) - { - shaderStorageImageMultisample = shaderStorageImageMultisample_; - return *this; - } - - PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( vk::Bool32 shaderStorageImageReadWithoutFormat_ ) - { - shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; - return *this; - } - - PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( vk::Bool32 shaderStorageImageWriteWithoutFormat_ ) - { - shaderStorageImageWriteWithoutFormat = 
shaderStorageImageWriteWithoutFormat_; - return *this; - } - - PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( vk::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) - { - shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( vk::Bool32 shaderSampledImageArrayDynamicIndexing_ ) - { - shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( vk::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) - { - shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( vk::Bool32 shaderStorageImageArrayDynamicIndexing_ ) - { - shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceFeatures & setShaderClipDistance( vk::Bool32 shaderClipDistance_ ) - { - shaderClipDistance = shaderClipDistance_; - return *this; - } - - PhysicalDeviceFeatures & setShaderCullDistance( vk::Bool32 shaderCullDistance_ ) - { - shaderCullDistance = shaderCullDistance_; - return *this; - } - - PhysicalDeviceFeatures & setShaderFloat64( vk::Bool32 shaderFloat64_ ) - { - shaderFloat64 = shaderFloat64_; - return *this; - } - - PhysicalDeviceFeatures & setShaderInt64( vk::Bool32 shaderInt64_ ) - { - shaderInt64 = shaderInt64_; - return *this; - } - - PhysicalDeviceFeatures & setShaderInt16( vk::Bool32 shaderInt16_ ) - { - shaderInt16 = shaderInt16_; - return *this; - } - - PhysicalDeviceFeatures & setShaderResourceResidency( vk::Bool32 shaderResourceResidency_ ) - { - shaderResourceResidency = shaderResourceResidency_; - return *this; - } - - PhysicalDeviceFeatures & setShaderResourceMinLod( vk::Bool32 shaderResourceMinLod_ ) - { - shaderResourceMinLod = shaderResourceMinLod_; - return *this; - } - - PhysicalDeviceFeatures & setSparseBinding( vk::Bool32 sparseBinding_ ) - { - sparseBinding = sparseBinding_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidencyBuffer( vk::Bool32 sparseResidencyBuffer_ ) - { - sparseResidencyBuffer = sparseResidencyBuffer_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidencyImage2D( vk::Bool32 sparseResidencyImage2D_ ) - { - sparseResidencyImage2D = sparseResidencyImage2D_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidencyImage3D( vk::Bool32 sparseResidencyImage3D_ ) - { - sparseResidencyImage3D = sparseResidencyImage3D_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidency2Samples( vk::Bool32 sparseResidency2Samples_ ) - { - sparseResidency2Samples = sparseResidency2Samples_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidency4Samples( vk::Bool32 sparseResidency4Samples_ ) - { - sparseResidency4Samples = sparseResidency4Samples_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidency8Samples( vk::Bool32 sparseResidency8Samples_ ) - { - sparseResidency8Samples = sparseResidency8Samples_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidency16Samples( vk::Bool32 sparseResidency16Samples_ ) - { - sparseResidency16Samples = sparseResidency16Samples_; - return *this; - } - - PhysicalDeviceFeatures & setSparseResidencyAliased( vk::Bool32 sparseResidencyAliased_ ) - { - sparseResidencyAliased = sparseResidencyAliased_; - return *this; - } - - PhysicalDeviceFeatures & 
setVariableMultisampleRate( vk::Bool32 variableMultisampleRate_ ) - { - variableMultisampleRate = variableMultisampleRate_; - return *this; - } - - PhysicalDeviceFeatures & setInheritedQueries( vk::Bool32 inheritedQueries_ ) - { - inheritedQueries = inheritedQueries_; - return *this; - } - - operator VkPhysicalDeviceFeatures const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFeatures &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFeatures const& rhs ) const - { - return ( robustBufferAccess == rhs.robustBufferAccess ) - && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) - && ( imageCubeArray == rhs.imageCubeArray ) - && ( independentBlend == rhs.independentBlend ) - && ( geometryShader == rhs.geometryShader ) - && ( tessellationShader == rhs.tessellationShader ) - && ( sampleRateShading == rhs.sampleRateShading ) - && ( dualSrcBlend == rhs.dualSrcBlend ) - && ( logicOp == rhs.logicOp ) - && ( multiDrawIndirect == rhs.multiDrawIndirect ) - && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) - && ( depthClamp == rhs.depthClamp ) - && ( depthBiasClamp == rhs.depthBiasClamp ) - && ( fillModeNonSolid == rhs.fillModeNonSolid ) - && ( depthBounds == rhs.depthBounds ) - && ( wideLines == rhs.wideLines ) - && ( largePoints == rhs.largePoints ) - && ( alphaToOne == rhs.alphaToOne ) - && ( multiViewport == rhs.multiViewport ) - && ( samplerAnisotropy == rhs.samplerAnisotropy ) - && ( textureCompressionETC2 == rhs.textureCompressionETC2 ) - && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) - && ( textureCompressionBC == rhs.textureCompressionBC ) - && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) - && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) - && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) - && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) - && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) - && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) - && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) - && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) - && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) - && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) - && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) - && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) - && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) - && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) - && ( shaderClipDistance == rhs.shaderClipDistance ) - && ( shaderCullDistance == rhs.shaderCullDistance ) - && ( shaderFloat64 == rhs.shaderFloat64 ) - && ( shaderInt64 == rhs.shaderInt64 ) - && ( shaderInt16 == rhs.shaderInt16 ) - && ( shaderResourceResidency == rhs.shaderResourceResidency ) - && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) - && ( sparseBinding == rhs.sparseBinding ) - && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) - && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) - && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) - && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) - && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) - && ( 
sparseResidency8Samples == rhs.sparseResidency8Samples ) - && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) - && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) - && ( variableMultisampleRate == rhs.variableMultisampleRate ) - && ( inheritedQueries == rhs.inheritedQueries ); - } - - bool operator!=( PhysicalDeviceFeatures const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Bool32 robustBufferAccess; - vk::Bool32 fullDrawIndexUint32; - vk::Bool32 imageCubeArray; - vk::Bool32 independentBlend; - vk::Bool32 geometryShader; - vk::Bool32 tessellationShader; - vk::Bool32 sampleRateShading; - vk::Bool32 dualSrcBlend; - vk::Bool32 logicOp; - vk::Bool32 multiDrawIndirect; - vk::Bool32 drawIndirectFirstInstance; - vk::Bool32 depthClamp; - vk::Bool32 depthBiasClamp; - vk::Bool32 fillModeNonSolid; - vk::Bool32 depthBounds; - vk::Bool32 wideLines; - vk::Bool32 largePoints; - vk::Bool32 alphaToOne; - vk::Bool32 multiViewport; - vk::Bool32 samplerAnisotropy; - vk::Bool32 textureCompressionETC2; - vk::Bool32 textureCompressionASTC_LDR; - vk::Bool32 textureCompressionBC; - vk::Bool32 occlusionQueryPrecise; - vk::Bool32 pipelineStatisticsQuery; - vk::Bool32 vertexPipelineStoresAndAtomics; - vk::Bool32 fragmentStoresAndAtomics; - vk::Bool32 shaderTessellationAndGeometryPointSize; - vk::Bool32 shaderImageGatherExtended; - vk::Bool32 shaderStorageImageExtendedFormats; - vk::Bool32 shaderStorageImageMultisample; - vk::Bool32 shaderStorageImageReadWithoutFormat; - vk::Bool32 shaderStorageImageWriteWithoutFormat; - vk::Bool32 shaderUniformBufferArrayDynamicIndexing; - vk::Bool32 shaderSampledImageArrayDynamicIndexing; - vk::Bool32 shaderStorageBufferArrayDynamicIndexing; - vk::Bool32 shaderStorageImageArrayDynamicIndexing; - vk::Bool32 shaderClipDistance; - vk::Bool32 shaderCullDistance; - vk::Bool32 shaderFloat64; - vk::Bool32 shaderInt64; - vk::Bool32 shaderInt16; - vk::Bool32 shaderResourceResidency; - vk::Bool32 shaderResourceMinLod; - vk::Bool32 sparseBinding; - vk::Bool32 sparseResidencyBuffer; - vk::Bool32 sparseResidencyImage2D; - vk::Bool32 sparseResidencyImage3D; - vk::Bool32 sparseResidency2Samples; - vk::Bool32 sparseResidency4Samples; - vk::Bool32 sparseResidency8Samples; - vk::Bool32 sparseResidency16Samples; - vk::Bool32 sparseResidencyAliased; - vk::Bool32 variableMultisampleRate; - vk::Bool32 inheritedQueries; - }; - static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DeviceCreateInfo - { - protected: - DeviceCreateInfo( vk::DeviceCreateFlags flags_ = vk::DeviceCreateFlags(), - uint32_t queueCreateInfoCount_ = 0, - const vk::DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, - uint32_t enabledLayerCount_ = 0, - const char* const* ppEnabledLayerNames_ = nullptr, - uint32_t enabledExtensionCount_ = 0, - const char* const* ppEnabledExtensionNames_ = nullptr, - const vk::PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) - : flags( flags_ ) - , queueCreateInfoCount( queueCreateInfoCount_ ) - , pQueueCreateInfos( pQueueCreateInfos_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) - , pEnabledFeatures( pEnabledFeatures_ ) - {} - - DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceCreateInfo; - const void* pNext = nullptr; - vk::DeviceCreateFlags flags; - uint32_t queueCreateInfoCount; - const vk::DeviceQueueCreateInfo* pQueueCreateInfos; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; - const vk::PhysicalDeviceFeatures* pEnabledFeatures; - }; - static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct DeviceCreateInfo : public layout::DeviceCreateInfo - { - DeviceCreateInfo( vk::DeviceCreateFlags flags_ = vk::DeviceCreateFlags(), - uint32_t queueCreateInfoCount_ = 0, - const vk::DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, - uint32_t enabledLayerCount_ = 0, - const char* const* ppEnabledLayerNames_ = nullptr, - uint32_t enabledExtensionCount_ = 0, - const char* const* ppEnabledExtensionNames_ = nullptr, - const vk::PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) - : layout::DeviceCreateInfo( flags_, queueCreateInfoCount_, pQueueCreateInfos_, enabledLayerCount_, ppEnabledLayerNames_, enabledExtensionCount_, ppEnabledExtensionNames_, pEnabledFeatures_ ) - {} - - DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) - : layout::DeviceCreateInfo( rhs ) - {} - - DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceCreateInfo & setFlags( vk::DeviceCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) - { - queueCreateInfoCount = queueCreateInfoCount_; - return *this; - } - - DeviceCreateInfo & setPQueueCreateInfos( const vk::DeviceQueueCreateInfo* pQueueCreateInfos_ ) - { - pQueueCreateInfos = pQueueCreateInfos_; - return *this; - } - - DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) - { - enabledLayerCount = enabledLayerCount_; - return *this; - } - - DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) - { - ppEnabledLayerNames = ppEnabledLayerNames_; - return *this; - } - - DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) - { - enabledExtensionCount = enabledExtensionCount_; - return *this; - } - - 
DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) - { - ppEnabledExtensionNames = ppEnabledExtensionNames_; - return *this; - } - - DeviceCreateInfo & setPEnabledFeatures( const vk::PhysicalDeviceFeatures* pEnabledFeatures_ ) - { - pEnabledFeatures = pEnabledFeatures_; - return *this; - } - - operator VkDeviceCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) - && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) - && ( enabledLayerCount == rhs.enabledLayerCount ) - && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) - && ( enabledExtensionCount == rhs.enabledExtensionCount ) - && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) - && ( pEnabledFeatures == rhs.pEnabledFeatures ); - } - - bool operator!=( DeviceCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceCreateInfo::sType; - }; - static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceEventInfoEXT - { - protected: - DeviceEventInfoEXT( vk::DeviceEventTypeEXT deviceEvent_ = vk::DeviceEventTypeEXT::eDisplayHotplug ) - : deviceEvent( deviceEvent_ ) - {} - - DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceEventInfoEXT; - const void* pNext = nullptr; - vk::DeviceEventTypeEXT deviceEvent; - }; - static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DeviceEventInfoEXT : public layout::DeviceEventInfoEXT - { - DeviceEventInfoEXT( vk::DeviceEventTypeEXT deviceEvent_ = vk::DeviceEventTypeEXT::eDisplayHotplug ) - : layout::DeviceEventInfoEXT( deviceEvent_ ) - {} - - DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) - : layout::DeviceEventInfoEXT( rhs ) - {} - - DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceEventInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceEventInfoEXT & setDeviceEvent( vk::DeviceEventTypeEXT deviceEvent_ ) - { - deviceEvent = deviceEvent_; - return *this; - } - - operator VkDeviceEventInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceEventInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceEventInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceEvent == rhs.deviceEvent ); - } - - bool operator!=( DeviceEventInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceEventInfoEXT::sType; - }; - static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DeviceGeneratedCommandsFeaturesNVX - { - protected: - DeviceGeneratedCommandsFeaturesNVX( vk::Bool32 computeBindingPointSupport_ = 0 ) - : computeBindingPointSupport( computeBindingPointSupport_ ) - {} - - DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX; - const void* pNext = nullptr; - vk::Bool32 computeBindingPointSupport; - }; - static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "layout struct and wrapper have different size!" ); - } - - struct DeviceGeneratedCommandsFeaturesNVX : public layout::DeviceGeneratedCommandsFeaturesNVX - { - DeviceGeneratedCommandsFeaturesNVX( vk::Bool32 computeBindingPointSupport_ = 0 ) - : layout::DeviceGeneratedCommandsFeaturesNVX( computeBindingPointSupport_ ) - {} - - DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) - : layout::DeviceGeneratedCommandsFeaturesNVX( rhs ) - {} - - DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( vk::Bool32 computeBindingPointSupport_ ) - { - computeBindingPointSupport = computeBindingPointSupport_; - return *this; - } - - operator VkDeviceGeneratedCommandsFeaturesNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGeneratedCommandsFeaturesNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( computeBindingPointSupport == rhs.computeBindingPointSupport ); - } - - bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGeneratedCommandsFeaturesNVX::sType; - }; - static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DeviceGeneratedCommandsLimitsNVX - { - protected: - DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, - uint32_t maxObjectEntryCounts_ = 0, - uint32_t minSequenceCountBufferOffsetAlignment_ = 0, - uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, - uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) - : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ ) - , maxObjectEntryCounts( maxObjectEntryCounts_ ) - , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ ) - , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ ) - , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ ) - {} - - DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX; - const void* pNext = nullptr; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; - }; - static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "layout struct and wrapper have different size!" ); - } - - struct DeviceGeneratedCommandsLimitsNVX : public layout::DeviceGeneratedCommandsLimitsNVX - { - DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, - uint32_t maxObjectEntryCounts_ = 0, - uint32_t minSequenceCountBufferOffsetAlignment_ = 0, - uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, - uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) - : layout::DeviceGeneratedCommandsLimitsNVX( maxIndirectCommandsLayoutTokenCount_, maxObjectEntryCounts_, minSequenceCountBufferOffsetAlignment_, minSequenceIndexBufferOffsetAlignment_, minCommandsTokenBufferOffsetAlignment_ ) - {} - - DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) - : layout::DeviceGeneratedCommandsLimitsNVX( rhs ) - {} - - DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) - { - maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) - { - maxObjectEntryCounts = maxObjectEntryCounts_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) - { - minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) - { - minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_; - return *this; - } - - DeviceGeneratedCommandsLimitsNVX & 
setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) - { - minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_; - return *this; - } - - operator VkDeviceGeneratedCommandsLimitsNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGeneratedCommandsLimitsNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount ) - && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts ) - && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment ) - && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment ) - && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment ); - } - - bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGeneratedCommandsLimitsNVX::sType; - }; - static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceGroupBindSparseInfo - { - protected: - DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0, - uint32_t memoryDeviceIndex_ = 0 ) - : resourceDeviceIndex( resourceDeviceIndex_ ) - , memoryDeviceIndex( memoryDeviceIndex_ ) - {} - - DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; - const void* pNext = nullptr; - uint32_t resourceDeviceIndex; - uint32_t memoryDeviceIndex; - }; - static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "layout struct and wrapper have different size!" 
); - } - - struct DeviceGroupBindSparseInfo : public layout::DeviceGroupBindSparseInfo - { - DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0, - uint32_t memoryDeviceIndex_ = 0 ) - : layout::DeviceGroupBindSparseInfo( resourceDeviceIndex_, memoryDeviceIndex_ ) - {} - - DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) - : layout::DeviceGroupBindSparseInfo( rhs ) - {} - - DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) - { - resourceDeviceIndex = resourceDeviceIndex_; - return *this; - } - - DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) - { - memoryDeviceIndex = memoryDeviceIndex_; - return *this; - } - - operator VkDeviceGroupBindSparseInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupBindSparseInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupBindSparseInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) - && ( memoryDeviceIndex == rhs.memoryDeviceIndex ); - } - - bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGroupBindSparseInfo::sType; - }; - static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceGroupCommandBufferBeginInfo - { - protected: - DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) - : deviceMask( deviceMask_ ) - {} - - DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; - const void* pNext = nullptr; - uint32_t deviceMask; - }; - static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "layout struct and wrapper have different size!" 
); - } - - struct DeviceGroupCommandBufferBeginInfo : public layout::DeviceGroupCommandBufferBeginInfo - { - DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) - : layout::DeviceGroupCommandBufferBeginInfo( deviceMask_ ) - {} - - DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) - : layout::DeviceGroupCommandBufferBeginInfo( rhs ) - {} - - DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) - { - deviceMask = deviceMask_; - return *this; - } - - operator VkDeviceGroupCommandBufferBeginInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupCommandBufferBeginInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ); - } - - bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGroupCommandBufferBeginInfo::sType; - }; - static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceGroupDeviceCreateInfo - { - protected: - DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0, - const vk::PhysicalDevice* pPhysicalDevices_ = nullptr ) - : physicalDeviceCount( physicalDeviceCount_ ) - , pPhysicalDevices( pPhysicalDevices_ ) - {} - - DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; - const void* pNext = nullptr; - uint32_t physicalDeviceCount; - const vk::PhysicalDevice* pPhysicalDevices; - }; - static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct DeviceGroupDeviceCreateInfo : public layout::DeviceGroupDeviceCreateInfo - { - DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0, - const vk::PhysicalDevice* pPhysicalDevices_ = nullptr ) - : layout::DeviceGroupDeviceCreateInfo( physicalDeviceCount_, pPhysicalDevices_ ) - {} - - DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) - : layout::DeviceGroupDeviceCreateInfo( rhs ) - {} - - DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) - { - physicalDeviceCount = physicalDeviceCount_; - return *this; - } - - DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const vk::PhysicalDevice* pPhysicalDevices_ ) - { - pPhysicalDevices = pPhysicalDevices_; - return *this; - } - - operator VkDeviceGroupDeviceCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupDeviceCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( pPhysicalDevices == rhs.pPhysicalDevices ); - } - - bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGroupDeviceCreateInfo::sType; - }; - static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceGroupPresentCapabilitiesKHR - { - protected: - DeviceGroupPresentCapabilitiesKHR( std::array const& presentMask_ = { { 0 } }, - vk::DeviceGroupPresentModeFlagsKHR modes_ = vk::DeviceGroupPresentModeFlagsKHR() ) - : modes( modes_ ) - { - memcpy( &presentMask, presentMask_.data(), VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ); - - } - - DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; - const void* pNext = nullptr; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; - vk::DeviceGroupPresentModeFlagsKHR modes; - }; - static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "layout struct and wrapper have different size!" 
); - } - - struct DeviceGroupPresentCapabilitiesKHR : public layout::DeviceGroupPresentCapabilitiesKHR - { - operator VkDeviceGroupPresentCapabilitiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupPresentCapabilitiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 ) - && ( modes == rhs.modes ); - } - - bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGroupPresentCapabilitiesKHR::sType; - }; - static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceGroupPresentInfoKHR - { - protected: - DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0, - const uint32_t* pDeviceMasks_ = nullptr, - vk::DeviceGroupPresentModeFlagBitsKHR mode_ = vk::DeviceGroupPresentModeFlagBitsKHR::eLocal ) - : swapchainCount( swapchainCount_ ) - , pDeviceMasks( pDeviceMasks_ ) - , mode( mode_ ) - {} - - DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; - const void* pNext = nullptr; - uint32_t swapchainCount; - const uint32_t* pDeviceMasks; - vk::DeviceGroupPresentModeFlagBitsKHR mode; - }; - static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct DeviceGroupPresentInfoKHR : public layout::DeviceGroupPresentInfoKHR - { - DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0, - const uint32_t* pDeviceMasks_ = nullptr, - vk::DeviceGroupPresentModeFlagBitsKHR mode_ = vk::DeviceGroupPresentModeFlagBitsKHR::eLocal ) - : layout::DeviceGroupPresentInfoKHR( swapchainCount_, pDeviceMasks_, mode_ ) - {} - - DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) - : layout::DeviceGroupPresentInfoKHR( rhs ) - {} - - DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) - { - swapchainCount = swapchainCount_; - return *this; - } - - DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) - { - pDeviceMasks = pDeviceMasks_; - return *this; - } - - DeviceGroupPresentInfoKHR & setMode( vk::DeviceGroupPresentModeFlagBitsKHR mode_ ) - { - mode = mode_; - return *this; - } - - operator VkDeviceGroupPresentInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupPresentInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pDeviceMasks == rhs.pDeviceMasks ) - && ( mode == rhs.mode ); - } - - bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGroupPresentInfoKHR::sType; - }; - static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DeviceGroupRenderPassBeginInfo - { - protected: - DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0, - uint32_t deviceRenderAreaCount_ = 0, - const vk::Rect2D* pDeviceRenderAreas_ = nullptr ) - : deviceMask( deviceMask_ ) - , deviceRenderAreaCount( deviceRenderAreaCount_ ) - , pDeviceRenderAreas( pDeviceRenderAreas_ ) - {} - - DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; - const void* pNext = nullptr; - uint32_t deviceMask; - uint32_t deviceRenderAreaCount; - const vk::Rect2D* pDeviceRenderAreas; - }; - static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "layout struct and wrapper have different size!" 
); - } - - struct DeviceGroupRenderPassBeginInfo : public layout::DeviceGroupRenderPassBeginInfo - { - DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0, - uint32_t deviceRenderAreaCount_ = 0, - const vk::Rect2D* pDeviceRenderAreas_ = nullptr ) - : layout::DeviceGroupRenderPassBeginInfo( deviceMask_, deviceRenderAreaCount_, pDeviceRenderAreas_ ) - {} - - DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) - : layout::DeviceGroupRenderPassBeginInfo( rhs ) - {} - - DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) - { - deviceMask = deviceMask_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) - { - deviceRenderAreaCount = deviceRenderAreaCount_; - return *this; - } - - DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const vk::Rect2D* pDeviceRenderAreas_ ) - { - pDeviceRenderAreas = pDeviceRenderAreas_; - return *this; - } - - operator VkDeviceGroupRenderPassBeginInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceGroupRenderPassBeginInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceMask == rhs.deviceMask ) - && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) - && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); - } - - bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceGroupRenderPassBeginInfo::sType; - }; - static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  namespace layout
-  {
-    struct DeviceGroupSubmitInfo
-    {
-    protected:
-      DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0,
-                             const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr,
-                             uint32_t commandBufferCount_ = 0,
-                             const uint32_t* pCommandBufferDeviceMasks_ = nullptr,
-                             uint32_t signalSemaphoreCount_ = 0,
-                             const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr )
-        : waitSemaphoreCount( waitSemaphoreCount_ )
-        , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
-        , commandBufferCount( commandBufferCount_ )
-        , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
-        , signalSemaphoreCount( signalSemaphoreCount_ )
-        , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
-      {}
-
-      DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs )
-      {
-        *reinterpret_cast<VkDeviceGroupSubmitInfo*>(this) = rhs;
-      }
-
-      DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs )
-      {
-        *reinterpret_cast<VkDeviceGroupSubmitInfo*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
-      const void* pNext = nullptr;
-      uint32_t waitSemaphoreCount;
-      const uint32_t* pWaitSemaphoreDeviceIndices;
-      uint32_t commandBufferCount;
-      const uint32_t* pCommandBufferDeviceMasks;
-      uint32_t signalSemaphoreCount;
-      const uint32_t* pSignalSemaphoreDeviceIndices;
-    };
-    static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "layout struct and wrapper have different size!" );
-  }
-
-  struct DeviceGroupSubmitInfo : public layout::DeviceGroupSubmitInfo
-  {
-    DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0,
-                           const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr,
-                           uint32_t commandBufferCount_ = 0,
-                           const uint32_t* pCommandBufferDeviceMasks_ = nullptr,
-                           uint32_t signalSemaphoreCount_ = 0,
-                           const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr )
-      : layout::DeviceGroupSubmitInfo( waitSemaphoreCount_, pWaitSemaphoreDeviceIndices_, commandBufferCount_, pCommandBufferDeviceMasks_, signalSemaphoreCount_, pSignalSemaphoreDeviceIndices_ )
-    {}
-
-    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs )
-      : layout::DeviceGroupSubmitInfo( rhs )
-    {}
-
-    DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs )
-    {
-      *reinterpret_cast<VkDeviceGroupSubmitInfo*>(this) = rhs;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
-    {
-      waitSemaphoreCount = waitSemaphoreCount_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ )
-    {
-      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ )
-    {
-      commandBufferCount = commandBufferCount_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ )
-    {
-      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
-    {
-      signalSemaphoreCount = signalSemaphoreCount_;
-      return *this;
-    }
-
-    DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ )
-    {
-      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
-      return *this;
-    }
-
-    operator VkDeviceGroupSubmitInfo const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
-    }
-
-    operator VkDeviceGroupSubmitInfo &()
-    {
-      return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
-    }
-
-    bool operator==( DeviceGroupSubmitInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
-          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
-          && ( commandBufferCount == rhs.commandBufferCount )
-          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
-          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
-          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
-    }
-
-    bool operator!=( DeviceGroupSubmitInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::DeviceGroupSubmitInfo::sType;
-  };
-  static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct DeviceGroupSwapchainCreateInfoKHR
-    {
-    protected:
-      DeviceGroupSwapchainCreateInfoKHR( vk::DeviceGroupPresentModeFlagsKHR modes_ = vk::DeviceGroupPresentModeFlagsKHR() )
-        : modes( modes_ )
-      {}
-
-      DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
-      {
-        *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>(this) = rhs;
-      }
-
-      DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
-      {
-        *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
-      const void* pNext = nullptr;
-      vk::DeviceGroupPresentModeFlagsKHR modes;
-    };
-    static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "layout struct and wrapper have different size!" );
-  }
-
-  struct DeviceGroupSwapchainCreateInfoKHR : public layout::DeviceGroupSwapchainCreateInfoKHR
-  {
-    DeviceGroupSwapchainCreateInfoKHR( vk::DeviceGroupPresentModeFlagsKHR modes_ = vk::DeviceGroupPresentModeFlagsKHR() )
-      : layout::DeviceGroupSwapchainCreateInfoKHR( modes_ )
-    {}
-
-    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
-      : layout::DeviceGroupSwapchainCreateInfoKHR( rhs )
-    {}
-
-    DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs )
-    {
-      *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>(this) = rhs;
-      return *this;
-    }
-
-    DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    DeviceGroupSwapchainCreateInfoKHR & setModes( vk::DeviceGroupPresentModeFlagsKHR modes_ )
-    {
-      modes = modes_;
-      return *this;
-    }
-
-    operator VkDeviceGroupSwapchainCreateInfoKHR const&() const
-    {
-      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
-    }
-
-    operator VkDeviceGroupSwapchainCreateInfoKHR &()
-    {
-      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
-    }
-
-    bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( modes == rhs.modes );
-    }
-
-    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::DeviceGroupSwapchainCreateInfoKHR::sType;
-  };
-  static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct DeviceMemoryOverallocationCreateInfoAMD - { - protected: - DeviceMemoryOverallocationCreateInfoAMD( vk::MemoryOverallocationBehaviorAMD overallocationBehavior_ = vk::MemoryOverallocationBehaviorAMD::eDefault ) - : overallocationBehavior( overallocationBehavior_ ) - {} - - DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; - const void* pNext = nullptr; - vk::MemoryOverallocationBehaviorAMD overallocationBehavior; - }; - static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "layout struct and wrapper have different size!" ); - } - - struct DeviceMemoryOverallocationCreateInfoAMD : public layout::DeviceMemoryOverallocationCreateInfoAMD - { - DeviceMemoryOverallocationCreateInfoAMD( vk::MemoryOverallocationBehaviorAMD overallocationBehavior_ = vk::MemoryOverallocationBehaviorAMD::eDefault ) - : layout::DeviceMemoryOverallocationCreateInfoAMD( overallocationBehavior_ ) - {} - - DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) - : layout::DeviceMemoryOverallocationCreateInfoAMD( rhs ) - {} - - DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( vk::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) - { - overallocationBehavior = overallocationBehavior_; - return *this; - } - - operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceMemoryOverallocationCreateInfoAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( overallocationBehavior == rhs.overallocationBehavior ); - } - - bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceMemoryOverallocationCreateInfoAMD::sType; - }; - static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DeviceQueueGlobalPriorityCreateInfoEXT - { - protected: - DeviceQueueGlobalPriorityCreateInfoEXT( vk::QueueGlobalPriorityEXT globalPriority_ = vk::QueueGlobalPriorityEXT::eLow ) - : globalPriority( globalPriority_ ) - {} - - DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; - const void* pNext = nullptr; - vk::QueueGlobalPriorityEXT globalPriority; - }; - static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DeviceQueueGlobalPriorityCreateInfoEXT : public layout::DeviceQueueGlobalPriorityCreateInfoEXT - { - DeviceQueueGlobalPriorityCreateInfoEXT( vk::QueueGlobalPriorityEXT globalPriority_ = vk::QueueGlobalPriorityEXT::eLow ) - : layout::DeviceQueueGlobalPriorityCreateInfoEXT( globalPriority_ ) - {} - - DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) - : layout::DeviceQueueGlobalPriorityCreateInfoEXT( rhs ) - {} - - DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( vk::QueueGlobalPriorityEXT globalPriority_ ) - { - globalPriority = globalPriority_; - return *this; - } - - operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( globalPriority == rhs.globalPriority ); - } - - bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceQueueGlobalPriorityCreateInfoEXT::sType; - }; - static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DeviceQueueInfo2 - { - protected: - DeviceQueueInfo2( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(), - uint32_t queueFamilyIndex_ = 0, - uint32_t queueIndex_ = 0 ) - : flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueIndex( queueIndex_ ) - {} - - DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDeviceQueueInfo2; - const void* pNext = nullptr; - vk::DeviceQueueCreateFlags flags; - uint32_t queueFamilyIndex; - uint32_t queueIndex; - }; - static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "layout struct and wrapper have different size!" ); - } - - struct DeviceQueueInfo2 : public layout::DeviceQueueInfo2 - { - DeviceQueueInfo2( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(), - uint32_t queueFamilyIndex_ = 0, - uint32_t queueIndex_ = 0 ) - : layout::DeviceQueueInfo2( flags_, queueFamilyIndex_, queueIndex_ ) - {} - - DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) - : layout::DeviceQueueInfo2( rhs ) - {} - - DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DeviceQueueInfo2 & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DeviceQueueInfo2 & setFlags( vk::DeviceQueueCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) - { - queueIndex = queueIndex_; - return *this; - } - - operator VkDeviceQueueInfo2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDeviceQueueInfo2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DeviceQueueInfo2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queueFamilyIndex == rhs.queueFamilyIndex ) - && ( queueIndex == rhs.queueIndex ); - } - - bool operator!=( DeviceQueueInfo2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DeviceQueueInfo2::sType; - }; - static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DispatchIndirectCommand - { - DispatchIndirectCommand( uint32_t x_ = 0, - uint32_t y_ = 0, - uint32_t z_ = 0 ) - : x( x_ ) - , y( y_ ) - , z( z_ ) - {} - - DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DispatchIndirectCommand & setX( uint32_t x_ ) - { - x = x_; - return *this; - } - - DispatchIndirectCommand & setY( uint32_t y_ ) - { - y = y_; - return *this; - } - - DispatchIndirectCommand & setZ( uint32_t z_ ) - { - z = z_; - return *this; - } - - operator VkDispatchIndirectCommand const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDispatchIndirectCommand &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DispatchIndirectCommand const& rhs ) const - { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( z == rhs.z ); - } - - bool operator!=( DispatchIndirectCommand const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t x; - uint32_t y; - uint32_t z; - }; - static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayEventInfoEXT - { - protected: - DisplayEventInfoEXT( vk::DisplayEventTypeEXT displayEvent_ = vk::DisplayEventTypeEXT::eFirstPixelOut ) - : displayEvent( displayEvent_ ) - {} - - DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayEventInfoEXT; - const void* pNext = nullptr; - vk::DisplayEventTypeEXT displayEvent; - }; - static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct DisplayEventInfoEXT : public layout::DisplayEventInfoEXT - { - DisplayEventInfoEXT( vk::DisplayEventTypeEXT displayEvent_ = vk::DisplayEventTypeEXT::eFirstPixelOut ) - : layout::DisplayEventInfoEXT( displayEvent_ ) - {} - - DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) - : layout::DisplayEventInfoEXT( rhs ) - {} - - DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplayEventInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DisplayEventInfoEXT & setDisplayEvent( vk::DisplayEventTypeEXT displayEvent_ ) - { - displayEvent = displayEvent_; - return *this; - } - - operator VkDisplayEventInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayEventInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayEventInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayEvent == rhs.displayEvent ); - } - - bool operator!=( DisplayEventInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayEventInfoEXT::sType; - }; - static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayModeParametersKHR - { - DisplayModeParametersKHR( vk::Extent2D visibleRegion_ = vk::Extent2D(), - uint32_t refreshRate_ = 0 ) - : visibleRegion( visibleRegion_ ) - , refreshRate( refreshRate_ ) - {} - - DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplayModeParametersKHR & setVisibleRegion( vk::Extent2D visibleRegion_ ) - { - visibleRegion = visibleRegion_; - return *this; - } - - DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) - { - refreshRate = refreshRate_; - return *this; - } - - operator VkDisplayModeParametersKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeParametersKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModeParametersKHR const& rhs ) const - { - return ( visibleRegion == rhs.visibleRegion ) - && ( refreshRate == rhs.refreshRate ); - } - - bool operator!=( DisplayModeParametersKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Extent2D visibleRegion; - uint32_t refreshRate; - }; - static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayModeCreateInfoKHR - { - protected: - DisplayModeCreateInfoKHR( vk::DisplayModeCreateFlagsKHR flags_ = vk::DisplayModeCreateFlagsKHR(), - vk::DisplayModeParametersKHR parameters_ = vk::DisplayModeParametersKHR() ) - : flags( flags_ ) - , parameters( parameters_ ) - {} - - DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; - const void* pNext = nullptr; - vk::DisplayModeCreateFlagsKHR flags; - vk::DisplayModeParametersKHR parameters; - }; - static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct DisplayModeCreateInfoKHR : public layout::DisplayModeCreateInfoKHR - { - DisplayModeCreateInfoKHR( vk::DisplayModeCreateFlagsKHR flags_ = vk::DisplayModeCreateFlagsKHR(), - vk::DisplayModeParametersKHR parameters_ = vk::DisplayModeParametersKHR() ) - : layout::DisplayModeCreateInfoKHR( flags_, parameters_ ) - {} - - DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) - : layout::DisplayModeCreateInfoKHR( rhs ) - {} - - DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DisplayModeCreateInfoKHR & setFlags( vk::DisplayModeCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - DisplayModeCreateInfoKHR & setParameters( vk::DisplayModeParametersKHR parameters_ ) - { - parameters = parameters_; - return *this; - } - - operator VkDisplayModeCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModeCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( parameters == rhs.parameters ); - } - - bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayModeCreateInfoKHR::sType; - }; - static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayModePropertiesKHR - { - operator VkDisplayModePropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModePropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModePropertiesKHR const& rhs ) const - { - return ( displayMode == rhs.displayMode ) - && ( parameters == rhs.parameters ); - } - - bool operator!=( DisplayModePropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DisplayModeKHR displayMode; - vk::DisplayModeParametersKHR parameters; - }; - static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayModeProperties2KHR - { - protected: - DisplayModeProperties2KHR( vk::DisplayModePropertiesKHR displayModeProperties_ = vk::DisplayModePropertiesKHR() ) - : displayModeProperties( displayModeProperties_ ) - {} - - DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayModeProperties2KHR; - void* pNext = nullptr; - vk::DisplayModePropertiesKHR displayModeProperties; - }; - static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "layout struct and wrapper have different size!" 
); - } - - struct DisplayModeProperties2KHR : public layout::DisplayModeProperties2KHR - { - operator VkDisplayModeProperties2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayModeProperties2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayModeProperties2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayModeProperties == rhs.displayModeProperties ); - } - - bool operator!=( DisplayModeProperties2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayModeProperties2KHR::sType; - }; - static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayNativeHdrSurfaceCapabilitiesAMD - { - protected: - DisplayNativeHdrSurfaceCapabilitiesAMD( vk::Bool32 localDimmingSupport_ = 0 ) - : localDimmingSupport( localDimmingSupport_ ) - {} - - DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; - void* pNext = nullptr; - vk::Bool32 localDimmingSupport; - }; - static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "layout struct and wrapper have different size!" ); - } - - struct DisplayNativeHdrSurfaceCapabilitiesAMD : public layout::DisplayNativeHdrSurfaceCapabilitiesAMD - { - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingSupport == rhs.localDimmingSupport ); - } - - bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayNativeHdrSurfaceCapabilitiesAMD::sType; - }; - static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DisplayPlaneCapabilitiesKHR - { - operator VkDisplayPlaneCapabilitiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneCapabilitiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const - { - return ( supportedAlpha == rhs.supportedAlpha ) - && ( minSrcPosition == rhs.minSrcPosition ) - && ( maxSrcPosition == rhs.maxSrcPosition ) - && ( minSrcExtent == rhs.minSrcExtent ) - && ( maxSrcExtent == rhs.maxSrcExtent ) - && ( minDstPosition == rhs.minDstPosition ) - && ( maxDstPosition == rhs.maxDstPosition ) - && ( minDstExtent == rhs.minDstExtent ) - && ( maxDstExtent == rhs.maxDstExtent ); - } - - bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DisplayPlaneAlphaFlagsKHR supportedAlpha; - vk::Offset2D minSrcPosition; - vk::Offset2D maxSrcPosition; - vk::Extent2D minSrcExtent; - vk::Extent2D maxSrcExtent; - vk::Offset2D minDstPosition; - vk::Offset2D maxDstPosition; - vk::Extent2D minDstExtent; - vk::Extent2D maxDstExtent; - }; - static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayPlaneCapabilities2KHR - { - protected: - DisplayPlaneCapabilities2KHR( vk::DisplayPlaneCapabilitiesKHR capabilities_ = vk::DisplayPlaneCapabilitiesKHR() ) - : capabilities( capabilities_ ) - {} - - DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; - void* pNext = nullptr; - vk::DisplayPlaneCapabilitiesKHR capabilities; - }; - static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "layout struct and wrapper have different size!" ); - } - - struct DisplayPlaneCapabilities2KHR : public layout::DisplayPlaneCapabilities2KHR - { - operator VkDisplayPlaneCapabilities2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneCapabilities2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( capabilities == rhs.capabilities ); - } - - bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayPlaneCapabilities2KHR::sType; - }; - static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DisplayPlaneInfo2KHR - { - protected: - DisplayPlaneInfo2KHR( vk::DisplayModeKHR mode_ = vk::DisplayModeKHR(), - uint32_t planeIndex_ = 0 ) - : mode( mode_ ) - , planeIndex( planeIndex_ ) - {} - - DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; - const void* pNext = nullptr; - vk::DisplayModeKHR mode; - uint32_t planeIndex; - }; - static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "layout struct and wrapper have different size!" ); - } - - struct DisplayPlaneInfo2KHR : public layout::DisplayPlaneInfo2KHR - { - DisplayPlaneInfo2KHR( vk::DisplayModeKHR mode_ = vk::DisplayModeKHR(), - uint32_t planeIndex_ = 0 ) - : layout::DisplayPlaneInfo2KHR( mode_, planeIndex_ ) - {} - - DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) - : layout::DisplayPlaneInfo2KHR( rhs ) - {} - - DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DisplayPlaneInfo2KHR & setMode( vk::DisplayModeKHR mode_ ) - { - mode = mode_; - return *this; - } - - DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) - { - planeIndex = planeIndex_; - return *this; - } - - operator VkDisplayPlaneInfo2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneInfo2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneInfo2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( mode == rhs.mode ) - && ( planeIndex == rhs.planeIndex ); - } - - bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayPlaneInfo2KHR::sType; - }; - static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayPlanePropertiesKHR - { - operator VkDisplayPlanePropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlanePropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlanePropertiesKHR const& rhs ) const - { - return ( currentDisplay == rhs.currentDisplay ) - && ( currentStackIndex == rhs.currentStackIndex ); - } - - bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DisplayKHR currentDisplay; - uint32_t currentStackIndex; - }; - static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DisplayPlaneProperties2KHR - { - protected: - DisplayPlaneProperties2KHR( vk::DisplayPlanePropertiesKHR displayPlaneProperties_ = vk::DisplayPlanePropertiesKHR() ) - : displayPlaneProperties( displayPlaneProperties_ ) - {} - - DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; - void* pNext = nullptr; - vk::DisplayPlanePropertiesKHR displayPlaneProperties; - }; - static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "layout struct and wrapper have different size!" ); - } - - struct DisplayPlaneProperties2KHR : public layout::DisplayPlaneProperties2KHR - { - operator VkDisplayPlaneProperties2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPlaneProperties2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPlaneProperties2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPlaneProperties == rhs.displayPlaneProperties ); - } - - bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayPlaneProperties2KHR::sType; - }; - static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayPowerInfoEXT - { - protected: - DisplayPowerInfoEXT( vk::DisplayPowerStateEXT powerState_ = vk::DisplayPowerStateEXT::eOff ) - : powerState( powerState_ ) - {} - - DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayPowerInfoEXT; - const void* pNext = nullptr; - vk::DisplayPowerStateEXT powerState; - }; - static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct DisplayPowerInfoEXT : public layout::DisplayPowerInfoEXT - { - DisplayPowerInfoEXT( vk::DisplayPowerStateEXT powerState_ = vk::DisplayPowerStateEXT::eOff ) - : layout::DisplayPowerInfoEXT( powerState_ ) - {} - - DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) - : layout::DisplayPowerInfoEXT( rhs ) - {} - - DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplayPowerInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DisplayPowerInfoEXT & setPowerState( vk::DisplayPowerStateEXT powerState_ ) - { - powerState = powerState_; - return *this; - } - - operator VkDisplayPowerInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPowerInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPowerInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( powerState == rhs.powerState ); - } - - bool operator!=( DisplayPowerInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayPowerInfoEXT::sType; - }; - static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplayPresentInfoKHR - { - protected: - DisplayPresentInfoKHR( vk::Rect2D srcRect_ = vk::Rect2D(), - vk::Rect2D dstRect_ = vk::Rect2D(), - vk::Bool32 persistent_ = 0 ) - : srcRect( srcRect_ ) - , dstRect( dstRect_ ) - , persistent( persistent_ ) - {} - - DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayPresentInfoKHR; - const void* pNext = nullptr; - vk::Rect2D srcRect; - vk::Rect2D dstRect; - vk::Bool32 persistent; - }; - static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct DisplayPresentInfoKHR : public layout::DisplayPresentInfoKHR - { - DisplayPresentInfoKHR( vk::Rect2D srcRect_ = vk::Rect2D(), - vk::Rect2D dstRect_ = vk::Rect2D(), - vk::Bool32 persistent_ = 0 ) - : layout::DisplayPresentInfoKHR( srcRect_, dstRect_, persistent_ ) - {} - - DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) - : layout::DisplayPresentInfoKHR( rhs ) - {} - - DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplayPresentInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DisplayPresentInfoKHR & setSrcRect( vk::Rect2D srcRect_ ) - { - srcRect = srcRect_; - return *this; - } - - DisplayPresentInfoKHR & setDstRect( vk::Rect2D dstRect_ ) - { - dstRect = dstRect_; - return *this; - } - - DisplayPresentInfoKHR & setPersistent( vk::Bool32 persistent_ ) - { - persistent = persistent_; - return *this; - } - - operator VkDisplayPresentInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPresentInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPresentInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcRect == rhs.srcRect ) - && ( dstRect == rhs.dstRect ) - && ( persistent == rhs.persistent ); - } - - bool operator!=( DisplayPresentInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayPresentInfoKHR::sType; - }; - static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DisplayPropertiesKHR - { - operator VkDisplayPropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayPropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayPropertiesKHR const& rhs ) const - { - return ( display == rhs.display ) - && ( displayName == rhs.displayName ) - && ( physicalDimensions == rhs.physicalDimensions ) - && ( physicalResolution == rhs.physicalResolution ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( planeReorderPossible == rhs.planeReorderPossible ) - && ( persistentContent == rhs.persistentContent ); - } - - bool operator!=( DisplayPropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DisplayKHR display; - const char* displayName; - vk::Extent2D physicalDimensions; - vk::Extent2D physicalResolution; - vk::SurfaceTransformFlagsKHR supportedTransforms; - vk::Bool32 planeReorderPossible; - vk::Bool32 persistentContent; - }; - static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct DisplayProperties2KHR - { - protected: - DisplayProperties2KHR( vk::DisplayPropertiesKHR displayProperties_ = vk::DisplayPropertiesKHR() ) - : displayProperties( displayProperties_ ) - {} - - DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplayProperties2KHR; - void* pNext = nullptr; - vk::DisplayPropertiesKHR displayProperties; - }; - static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "layout struct and wrapper have different size!" ); - } - - struct DisplayProperties2KHR : public layout::DisplayProperties2KHR - { - operator VkDisplayProperties2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplayProperties2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplayProperties2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayProperties == rhs.displayProperties ); - } - - bool operator!=( DisplayProperties2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplayProperties2KHR::sType; - }; - static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DisplaySurfaceCreateInfoKHR - { - protected: - DisplaySurfaceCreateInfoKHR( vk::DisplaySurfaceCreateFlagsKHR flags_ = vk::DisplaySurfaceCreateFlagsKHR(), - vk::DisplayModeKHR displayMode_ = vk::DisplayModeKHR(), - uint32_t planeIndex_ = 0, - uint32_t planeStackIndex_ = 0, - vk::SurfaceTransformFlagBitsKHR transform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity, - float globalAlpha_ = 0, - vk::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque, - vk::Extent2D imageExtent_ = vk::Extent2D() ) - : flags( flags_ ) - , displayMode( displayMode_ ) - , planeIndex( planeIndex_ ) - , planeStackIndex( planeStackIndex_ ) - , transform( transform_ ) - , globalAlpha( globalAlpha_ ) - , alphaMode( alphaMode_ ) - , imageExtent( imageExtent_ ) - {} - - DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; - const void* pNext = nullptr; - vk::DisplaySurfaceCreateFlagsKHR flags; - vk::DisplayModeKHR displayMode; - uint32_t planeIndex; - uint32_t planeStackIndex; - vk::SurfaceTransformFlagBitsKHR transform; - float globalAlpha; - vk::DisplayPlaneAlphaFlagBitsKHR alphaMode; - vk::Extent2D imageExtent; - }; - static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct DisplaySurfaceCreateInfoKHR : public layout::DisplaySurfaceCreateInfoKHR - { - DisplaySurfaceCreateInfoKHR( vk::DisplaySurfaceCreateFlagsKHR flags_ = vk::DisplaySurfaceCreateFlagsKHR(), - vk::DisplayModeKHR displayMode_ = vk::DisplayModeKHR(), - uint32_t planeIndex_ = 0, - uint32_t planeStackIndex_ = 0, - vk::SurfaceTransformFlagBitsKHR transform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity, - float globalAlpha_ = 0, - vk::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque, - vk::Extent2D imageExtent_ = vk::Extent2D() ) - : layout::DisplaySurfaceCreateInfoKHR( flags_, displayMode_, planeIndex_, planeStackIndex_, transform_, globalAlpha_, alphaMode_, imageExtent_ ) - {} - - DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) - : layout::DisplaySurfaceCreateInfoKHR( rhs ) - {} - - DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setFlags( vk::DisplaySurfaceCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setDisplayMode( vk::DisplayModeKHR displayMode_ ) - { - displayMode = displayMode_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) - { - planeIndex = planeIndex_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) - { - planeStackIndex = planeStackIndex_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setTransform( vk::SurfaceTransformFlagBitsKHR transform_ ) - { - transform = transform_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) - { - globalAlpha = globalAlpha_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) - { - alphaMode = alphaMode_; - return *this; - } - - DisplaySurfaceCreateInfoKHR & setImageExtent( vk::Extent2D imageExtent_ ) - { - imageExtent = imageExtent_; - return *this; - } - - operator VkDisplaySurfaceCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDisplaySurfaceCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( displayMode == rhs.displayMode ) - && ( planeIndex == rhs.planeIndex ) - && ( planeStackIndex == rhs.planeStackIndex ) - && ( transform == rhs.transform ) - && ( globalAlpha == rhs.globalAlpha ) - && ( alphaMode == rhs.alphaMode ) - && ( imageExtent == rhs.imageExtent ); - } - - bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DisplaySurfaceCreateInfoKHR::sType; - }; - static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DrawIndexedIndirectCommand - { - DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, - uint32_t instanceCount_ = 0, - uint32_t firstIndex_ = 0, - int32_t vertexOffset_ = 0, - uint32_t firstInstance_ = 0 ) - : indexCount( indexCount_ ) - , instanceCount( instanceCount_ ) - , firstIndex( firstIndex_ ) - , vertexOffset( vertexOffset_ ) - , firstInstance( firstInstance_ ) - {} - - DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) - { - indexCount = indexCount_; - return *this; - } - - DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) - { - instanceCount = instanceCount_; - return *this; - } - - DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) - { - firstIndex = firstIndex_; - return *this; - } - - DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) - { - vertexOffset = vertexOffset_; - return *this; - } - - DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) - { - firstInstance = firstInstance_; - return *this; - } - - operator VkDrawIndexedIndirectCommand const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDrawIndexedIndirectCommand &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DrawIndexedIndirectCommand const& rhs ) const - { - return ( indexCount == rhs.indexCount ) - && ( instanceCount == rhs.instanceCount ) - && ( firstIndex == rhs.firstIndex ) - && ( vertexOffset == rhs.vertexOffset ) - && ( firstInstance == rhs.firstInstance ); - } - - bool operator!=( DrawIndexedIndirectCommand const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t indexCount; - uint32_t instanceCount; - uint32_t firstIndex; - int32_t vertexOffset; - uint32_t firstInstance; - }; - static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DrawIndirectCommand - { - DrawIndirectCommand( uint32_t vertexCount_ = 0, - uint32_t instanceCount_ = 0, - uint32_t firstVertex_ = 0, - uint32_t firstInstance_ = 0 ) - : vertexCount( vertexCount_ ) - , instanceCount( instanceCount_ ) - , firstVertex( firstVertex_ ) - , firstInstance( firstInstance_ ) - {} - - DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) - { - vertexCount = vertexCount_; - return *this; - } - - DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) - { - instanceCount = instanceCount_; - return *this; - } - - DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) - { - firstVertex = firstVertex_; - return *this; - } - - DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) - { - firstInstance = firstInstance_; - return *this; - } - - operator VkDrawIndirectCommand const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDrawIndirectCommand &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DrawIndirectCommand const& rhs ) const - { - return ( vertexCount == rhs.vertexCount ) - && ( instanceCount == rhs.instanceCount ) - && ( firstVertex == rhs.firstVertex ) - && ( firstInstance == rhs.firstInstance ); - } - - bool operator!=( DrawIndirectCommand const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t vertexCount; - uint32_t instanceCount; - uint32_t firstVertex; - uint32_t firstInstance; - }; - static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct DrawMeshTasksIndirectCommandNV - { - DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = 0, - uint32_t firstTask_ = 0 ) - : taskCount( taskCount_ ) - , firstTask( firstTask_ ) - {} - - DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) - { - taskCount = taskCount_; - return *this; - } - - DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) - { - firstTask = firstTask_; - return *this; - } - - operator VkDrawMeshTasksIndirectCommandNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDrawMeshTasksIndirectCommandNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const - { - return ( taskCount == rhs.taskCount ) - && ( firstTask == rhs.firstTask ); - } - - bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t taskCount; - uint32_t firstTask; - }; - static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct DrmFormatModifierPropertiesEXT - { - operator VkDrmFormatModifierPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDrmFormatModifierPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const - { - return ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); - } - - bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - vk::FormatFeatureFlags drmFormatModifierTilingFeatures; - }; - static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct DrmFormatModifierPropertiesListEXT - { - protected: - DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = 0, - vk::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = nullptr ) - : drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) - {} - - DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; - void* pNext = nullptr; - uint32_t drmFormatModifierCount; - vk::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; - }; - static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "layout struct and wrapper have different size!" ); - } - - struct DrmFormatModifierPropertiesListEXT : public layout::DrmFormatModifierPropertiesListEXT - { - operator VkDrmFormatModifierPropertiesListEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkDrmFormatModifierPropertiesListEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); - } - - bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::DrmFormatModifierPropertiesListEXT::sType; - }; - static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct EventCreateInfo - { - protected: - EventCreateInfo( vk::EventCreateFlags flags_ = vk::EventCreateFlags() ) - : flags( flags_ ) - {} - - EventCreateInfo( VkEventCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eEventCreateInfo; - const void* pNext = nullptr; - vk::EventCreateFlags flags; - }; - static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct EventCreateInfo : public layout::EventCreateInfo - { - EventCreateInfo( vk::EventCreateFlags flags_ = vk::EventCreateFlags() ) - : layout::EventCreateInfo( flags_ ) - {} - - EventCreateInfo( VkEventCreateInfo const & rhs ) - : layout::EventCreateInfo( rhs ) - {} - - EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - EventCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - EventCreateInfo & setFlags( vk::EventCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - operator VkEventCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkEventCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( EventCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( EventCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::EventCreateInfo::sType; - }; - static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ExportFenceCreateInfo - { - protected: - ExportFenceCreateInfo( vk::ExternalFenceHandleTypeFlags handleTypes_ = vk::ExternalFenceHandleTypeFlags() ) - : handleTypes( handleTypes_ ) - {} - - ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportFenceCreateInfo; - const void* pNext = nullptr; - vk::ExternalFenceHandleTypeFlags handleTypes; - }; - static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ExportFenceCreateInfo : public layout::ExportFenceCreateInfo - { - ExportFenceCreateInfo( vk::ExternalFenceHandleTypeFlags handleTypes_ = vk::ExternalFenceHandleTypeFlags() ) - : layout::ExportFenceCreateInfo( handleTypes_ ) - {} - - ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) - : layout::ExportFenceCreateInfo( rhs ) - {} - - ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportFenceCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportFenceCreateInfo & setHandleTypes( vk::ExternalFenceHandleTypeFlags handleTypes_ ) - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportFenceCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportFenceCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportFenceCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportFenceCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportFenceCreateInfo::sType; - }; - static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ExportFenceWin32HandleInfoKHR - { - protected: - ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0, - LPCWSTR name_ = nullptr ) - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; - const void* pNext = nullptr; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; - }; - static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ExportFenceWin32HandleInfoKHR : public layout::ExportFenceWin32HandleInfoKHR - { - ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0, - LPCWSTR name_ = nullptr ) - : layout::ExportFenceWin32HandleInfoKHR( pAttributes_, dwAccess_, name_ ) - {} - - ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) - : layout::ExportFenceWin32HandleInfoKHR( rhs ) - {} - - ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) - { - pAttributes = pAttributes_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) - { - dwAccess = dwAccess_; - return *this; - } - - ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) - { - name = name_; - return *this; - } - - operator VkExportFenceWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportFenceWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportFenceWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct ExportMemoryAllocateInfo - { - protected: - ExportMemoryAllocateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) - : handleTypes( handleTypes_ ) - {} - - ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportMemoryAllocateInfo; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlags handleTypes; - }; - static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ExportMemoryAllocateInfo : public layout::ExportMemoryAllocateInfo - { - ExportMemoryAllocateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) - : layout::ExportMemoryAllocateInfo( handleTypes_ ) - {} - - ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) - : layout::ExportMemoryAllocateInfo( rhs ) - {} - - ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportMemoryAllocateInfo & setHandleTypes( vk::ExternalMemoryHandleTypeFlags handleTypes_ ) - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportMemoryAllocateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryAllocateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryAllocateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportMemoryAllocateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportMemoryAllocateInfo::sType; - }; - static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ExportMemoryAllocateInfoNV - { - protected: - ExportMemoryAllocateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) - : handleTypes( handleTypes_ ) - {} - - ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagsNV handleTypes; - }; - static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct ExportMemoryAllocateInfoNV : public layout::ExportMemoryAllocateInfoNV - { - ExportMemoryAllocateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) - : layout::ExportMemoryAllocateInfoNV( handleTypes_ ) - {} - - ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) - : layout::ExportMemoryAllocateInfoNV( rhs ) - {} - - ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportMemoryAllocateInfoNV & setHandleTypes( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportMemoryAllocateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryAllocateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportMemoryAllocateInfoNV::sType; - }; - static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ExportMemoryWin32HandleInfoKHR - { - protected: - ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0, - LPCWSTR name_ = nullptr ) - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; - const void* pNext = nullptr; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ExportMemoryWin32HandleInfoKHR : public layout::ExportMemoryWin32HandleInfoKHR - { - ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0, - LPCWSTR name_ = nullptr ) - : layout::ExportMemoryWin32HandleInfoKHR( pAttributes_, dwAccess_, name_ ) - {} - - ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) - : layout::ExportMemoryWin32HandleInfoKHR( rhs ) - {} - - ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) - { - pAttributes = pAttributes_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) - { - dwAccess = dwAccess_; - return *this; - } - - ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) - { - name = name_; - return *this; - } - - operator VkExportMemoryWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportMemoryWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ExportMemoryWin32HandleInfoNV - { - protected: - ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0 ) - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - {} - - ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; - const void* pNext = nullptr; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct ExportMemoryWin32HandleInfoNV : public layout::ExportMemoryWin32HandleInfoNV - { - ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0 ) - : layout::ExportMemoryWin32HandleInfoNV( pAttributes_, dwAccess_ ) - {} - - ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) - : layout::ExportMemoryWin32HandleInfoNV( rhs ) - {} - - ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) - { - pAttributes = pAttributes_; - return *this; - } - - ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) - { - dwAccess = dwAccess_; - return *this; - } - - operator VkExportMemoryWin32HandleInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportMemoryWin32HandleInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ); - } - - bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportMemoryWin32HandleInfoNV::sType; - }; - static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct ExportSemaphoreCreateInfo - { - protected: - ExportSemaphoreCreateInfo( vk::ExternalSemaphoreHandleTypeFlags handleTypes_ = vk::ExternalSemaphoreHandleTypeFlags() ) - : handleTypes( handleTypes_ ) - {} - - ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportSemaphoreCreateInfo; - const void* pNext = nullptr; - vk::ExternalSemaphoreHandleTypeFlags handleTypes; - }; - static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ExportSemaphoreCreateInfo : public layout::ExportSemaphoreCreateInfo - { - ExportSemaphoreCreateInfo( vk::ExternalSemaphoreHandleTypeFlags handleTypes_ = vk::ExternalSemaphoreHandleTypeFlags() ) - : layout::ExportSemaphoreCreateInfo( handleTypes_ ) - {} - - ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) - : layout::ExportSemaphoreCreateInfo( rhs ) - {} - - ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportSemaphoreCreateInfo & setHandleTypes( vk::ExternalSemaphoreHandleTypeFlags handleTypes_ ) - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExportSemaphoreCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportSemaphoreCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportSemaphoreCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportSemaphoreCreateInfo::sType; - }; - static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ExportSemaphoreWin32HandleInfoKHR - { - protected: - ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0, - LPCWSTR name_ = nullptr ) - : pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) - {} - - ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; - const void* pNext = nullptr; - const SECURITY_ATTRIBUTES* pAttributes; - DWORD dwAccess; - LPCWSTR name; - }; - static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ExportSemaphoreWin32HandleInfoKHR : public layout::ExportSemaphoreWin32HandleInfoKHR - { - ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, - DWORD dwAccess_ = 0, - LPCWSTR name_ = nullptr ) - : layout::ExportSemaphoreWin32HandleInfoKHR( pAttributes_, dwAccess_, name_ ) - {} - - ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) - : layout::ExportSemaphoreWin32HandleInfoKHR( rhs ) - {} - - ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) - { - pAttributes = pAttributes_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) - { - dwAccess = dwAccess_; - return *this; - } - - ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) - { - name = name_; - return *this; - } - - operator VkExportSemaphoreWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExportSemaphoreWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pAttributes == rhs.pAttributes ) - && ( dwAccess == rhs.dwAccess ) - && ( name == rhs.name ); - } - - bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExportSemaphoreWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExtensionProperties - { - operator VkExtensionProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExtensionProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExtensionProperties const& rhs ) const - { - return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( specVersion == rhs.specVersion ); - } - - bool operator!=( ExtensionProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; - }; - static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  struct ExternalMemoryProperties
-  {
-    operator VkExternalMemoryProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
-    }
-
-    operator VkExternalMemoryProperties &()
-    {
-      return *reinterpret_cast<VkExternalMemoryProperties*>( this );
-    }
-
-    bool operator==( ExternalMemoryProperties const& rhs ) const
-    {
-      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
-          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
-          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
-    }
-
-    bool operator!=( ExternalMemoryProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::ExternalMemoryFeatureFlags externalMemoryFeatures;
-    vk::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes;
-    vk::ExternalMemoryHandleTypeFlags compatibleHandleTypes;
-  };
-  static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct ExternalBufferProperties
-    {
-    protected:
-      ExternalBufferProperties( vk::ExternalMemoryProperties externalMemoryProperties_ = vk::ExternalMemoryProperties() )
-        : externalMemoryProperties( externalMemoryProperties_ )
-      {}
-
-      ExternalBufferProperties( VkExternalBufferProperties const & rhs )
-      {
-        *reinterpret_cast<VkExternalBufferProperties*>(this) = rhs;
-      }
-
-      ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs )
-      {
-        *reinterpret_cast<VkExternalBufferProperties*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eExternalBufferProperties;
-      void* pNext = nullptr;
-      vk::ExternalMemoryProperties externalMemoryProperties;
-    };
-    static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "layout struct and wrapper have different size!" );
-  }
-
-  struct ExternalBufferProperties : public layout::ExternalBufferProperties
-  {
-    operator VkExternalBufferProperties const&() const
-    {
-      return *reinterpret_cast<const VkExternalBufferProperties*>( this );
-    }
-
-    operator VkExternalBufferProperties &()
-    {
-      return *reinterpret_cast<VkExternalBufferProperties*>( this );
-    }
-
-    bool operator==( ExternalBufferProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( externalMemoryProperties == rhs.externalMemoryProperties );
-    }
-
-    bool operator!=( ExternalBufferProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::ExternalBufferProperties::sType;
-  };
-  static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct ExternalFenceProperties - { - protected: - ExternalFenceProperties( vk::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = vk::ExternalFenceHandleTypeFlags(), - vk::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = vk::ExternalFenceHandleTypeFlags(), - vk::ExternalFenceFeatureFlags externalFenceFeatures_ = vk::ExternalFenceFeatureFlags() ) - : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalFenceFeatures( externalFenceFeatures_ ) - {} - - ExternalFenceProperties( VkExternalFenceProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExternalFenceProperties; - void* pNext = nullptr; - vk::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes; - vk::ExternalFenceHandleTypeFlags compatibleHandleTypes; - vk::ExternalFenceFeatureFlags externalFenceFeatures; - }; - static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "layout struct and wrapper have different size!" ); - } - - struct ExternalFenceProperties : public layout::ExternalFenceProperties - { - operator VkExternalFenceProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalFenceProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalFenceProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalFenceFeatures == rhs.externalFenceFeatures ); - } - - bool operator!=( ExternalFenceProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExternalFenceProperties::sType; - }; - static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - namespace layout - { - struct ExternalFormatANDROID - { - protected: - ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) - : externalFormat( externalFormat_ ) - {} - - ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExternalFormatANDROID; - void* pNext = nullptr; - uint64_t externalFormat; - }; - static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "layout struct and wrapper have different size!" 
); - } - - struct ExternalFormatANDROID : public layout::ExternalFormatANDROID - { - ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) - : layout::ExternalFormatANDROID( externalFormat_ ) - {} - - ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) - : layout::ExternalFormatANDROID( rhs ) - {} - - ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExternalFormatANDROID & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) - { - externalFormat = externalFormat_; - return *this; - } - - operator VkExternalFormatANDROID const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalFormatANDROID &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalFormatANDROID const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalFormat == rhs.externalFormat ); - } - - bool operator!=( ExternalFormatANDROID const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExternalFormatANDROID::sType; - }; - static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - namespace layout - { - struct ExternalImageFormatProperties - { - protected: - ExternalImageFormatProperties( vk::ExternalMemoryProperties externalMemoryProperties_ = vk::ExternalMemoryProperties() ) - : externalMemoryProperties( externalMemoryProperties_ ) - {} - - ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExternalImageFormatProperties; - void* pNext = nullptr; - vk::ExternalMemoryProperties externalMemoryProperties; - }; - static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "layout struct and wrapper have different size!" ); - } - - struct ExternalImageFormatProperties : public layout::ExternalImageFormatProperties - { - operator VkExternalImageFormatProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalImageFormatProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalImageFormatProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( externalMemoryProperties == rhs.externalMemoryProperties ); - } - - bool operator!=( ExternalImageFormatProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExternalImageFormatProperties::sType; - }; - static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ImageFormatProperties - { - operator VkImageFormatProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageFormatProperties const& rhs ) const - { - return ( maxExtent == rhs.maxExtent ) - && ( maxMipLevels == rhs.maxMipLevels ) - && ( maxArrayLayers == rhs.maxArrayLayers ) - && ( sampleCounts == rhs.sampleCounts ) - && ( maxResourceSize == rhs.maxResourceSize ); - } - - bool operator!=( ImageFormatProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Extent3D maxExtent; - uint32_t maxMipLevels; - uint32_t maxArrayLayers; - vk::SampleCountFlags sampleCounts; - vk::DeviceSize maxResourceSize; - }; - static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ExternalImageFormatPropertiesNV - { - operator VkExternalImageFormatPropertiesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalImageFormatPropertiesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const - { - return ( imageFormatProperties == rhs.imageFormatProperties ) - && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); - } - - bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ImageFormatProperties imageFormatProperties; - vk::ExternalMemoryFeatureFlagsNV externalMemoryFeatures; - vk::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; - vk::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; - }; - static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ExternalMemoryBufferCreateInfo - { - protected: - ExternalMemoryBufferCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) - : handleTypes( handleTypes_ ) - {} - - ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlags handleTypes; - }; - static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "layout struct and wrapper have different size!" 
);
-  }
-
-  struct ExternalMemoryBufferCreateInfo : public layout::ExternalMemoryBufferCreateInfo
-  {
-    ExternalMemoryBufferCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() )
-      : layout::ExternalMemoryBufferCreateInfo( handleTypes_ )
-    {}
-
-    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs )
-      : layout::ExternalMemoryBufferCreateInfo( rhs )
-    {}
-
-    ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs )
-    {
-      *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>(this) = rhs;
-      return *this;
-    }
-
-    ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ExternalMemoryBufferCreateInfo & setHandleTypes( vk::ExternalMemoryHandleTypeFlags handleTypes_ )
-    {
-      handleTypes = handleTypes_;
-      return *this;
-    }
-
-    operator VkExternalMemoryBufferCreateInfo const&() const
-    {
-      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
-    }
-
-    operator VkExternalMemoryBufferCreateInfo &()
-    {
-      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
-    }
-
-    bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( handleTypes == rhs.handleTypes );
-    }
-
-    bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::ExternalMemoryBufferCreateInfo::sType;
-  };
-  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct ExternalMemoryImageCreateInfo
-    {
-    protected:
-      ExternalMemoryImageCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() )
-        : handleTypes( handleTypes_ )
-      {}
-
-      ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs )
-      {
-        *reinterpret_cast<VkExternalMemoryImageCreateInfo*>(this) = rhs;
-      }
-
-      ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs )
-      {
-        *reinterpret_cast<VkExternalMemoryImageCreateInfo*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
-      const void* pNext = nullptr;
-      vk::ExternalMemoryHandleTypeFlags handleTypes;
-    };
-    static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "layout struct and wrapper have different size!"
); - } - - struct ExternalMemoryImageCreateInfo : public layout::ExternalMemoryImageCreateInfo - { - ExternalMemoryImageCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) - : layout::ExternalMemoryImageCreateInfo( handleTypes_ ) - {} - - ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) - : layout::ExternalMemoryImageCreateInfo( rhs ) - {} - - ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExternalMemoryImageCreateInfo & setHandleTypes( vk::ExternalMemoryHandleTypeFlags handleTypes_ ) - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExternalMemoryImageCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryImageCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExternalMemoryImageCreateInfo::sType; - }; - static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ExternalMemoryImageCreateInfoNV - { - protected: - ExternalMemoryImageCreateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) - : handleTypes( handleTypes_ ) - {} - - ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagsNV handleTypes; - }; - static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct ExternalMemoryImageCreateInfoNV : public layout::ExternalMemoryImageCreateInfoNV - { - ExternalMemoryImageCreateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) - : layout::ExternalMemoryImageCreateInfoNV( handleTypes_ ) - {} - - ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) - : layout::ExternalMemoryImageCreateInfoNV( rhs ) - {} - - ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ExternalMemoryImageCreateInfoNV & setHandleTypes( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) - { - handleTypes = handleTypes_; - return *this; - } - - operator VkExternalMemoryImageCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalMemoryImageCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleTypes == rhs.handleTypes ); - } - - bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExternalMemoryImageCreateInfoNV::sType; - }; - static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ExternalSemaphoreProperties - { - protected: - ExternalSemaphoreProperties( vk::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = vk::ExternalSemaphoreHandleTypeFlags(), - vk::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = vk::ExternalSemaphoreHandleTypeFlags(), - vk::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = vk::ExternalSemaphoreFeatureFlags() ) - : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) - {} - - ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eExternalSemaphoreProperties; - void* pNext = nullptr; - vk::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; - vk::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes; - vk::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures; - }; - static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "layout struct and wrapper have different size!" 
); - } - - struct ExternalSemaphoreProperties : public layout::ExternalSemaphoreProperties - { - operator VkExternalSemaphoreProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkExternalSemaphoreProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ExternalSemaphoreProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) - && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) - && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); - } - - bool operator!=( ExternalSemaphoreProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ExternalSemaphoreProperties::sType; - }; - static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct FenceCreateInfo - { - protected: - FenceCreateInfo( vk::FenceCreateFlags flags_ = vk::FenceCreateFlags() ) - : flags( flags_ ) - {} - - FenceCreateInfo( VkFenceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eFenceCreateInfo; - const void* pNext = nullptr; - vk::FenceCreateFlags flags; - }; - static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct FenceCreateInfo : public layout::FenceCreateInfo - { - FenceCreateInfo( vk::FenceCreateFlags flags_ = vk::FenceCreateFlags() ) - : layout::FenceCreateInfo( flags_ ) - {} - - FenceCreateInfo( VkFenceCreateInfo const & rhs ) - : layout::FenceCreateInfo( rhs ) - {} - - FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - FenceCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - FenceCreateInfo & setFlags( vk::FenceCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - operator VkFenceCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkFenceCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( FenceCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( FenceCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::FenceCreateInfo::sType; - }; - static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct FenceGetFdInfoKHR - { - protected: - FenceGetFdInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) - : fence( fence_ ) - , handleType( handleType_ ) - {} - - FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eFenceGetFdInfoKHR; - const void* pNext = nullptr; - vk::Fence fence; - vk::ExternalFenceHandleTypeFlagBits handleType; - }; - static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct FenceGetFdInfoKHR : public layout::FenceGetFdInfoKHR - { - FenceGetFdInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) - : layout::FenceGetFdInfoKHR( fence_, handleType_ ) - {} - - FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) - : layout::FenceGetFdInfoKHR( rhs ) - {} - - FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - FenceGetFdInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - FenceGetFdInfoKHR & setFence( vk::Fence fence_ ) - { - fence = fence_; - return *this; - } - - FenceGetFdInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkFenceGetFdInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkFenceGetFdInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( FenceGetFdInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( FenceGetFdInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::FenceGetFdInfoKHR::sType; - }; - static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct FenceGetWin32HandleInfoKHR - { - protected: - FenceGetWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) - : fence( fence_ ) - , handleType( handleType_ ) - {} - - FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; - const void* pNext = nullptr; - vk::Fence fence; - vk::ExternalFenceHandleTypeFlagBits handleType; - }; - static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct FenceGetWin32HandleInfoKHR : public layout::FenceGetWin32HandleInfoKHR - { - FenceGetWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) - : layout::FenceGetWin32HandleInfoKHR( fence_, handleType_ ) - {} - - FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) - : layout::FenceGetWin32HandleInfoKHR( rhs ) - {} - - FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - FenceGetWin32HandleInfoKHR & setFence( vk::Fence fence_ ) - { - fence = fence_; - return *this; - } - - FenceGetWin32HandleInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkFenceGetWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkFenceGetWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::FenceGetWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct FilterCubicImageViewImageFormatPropertiesEXT - { - protected: - FilterCubicImageViewImageFormatPropertiesEXT( vk::Bool32 filterCubic_ = 0, - vk::Bool32 filterCubicMinmax_ = 0 ) - : filterCubic( filterCubic_ ) - , filterCubicMinmax( filterCubicMinmax_ ) - {} - - FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - void* pNext = nullptr; - vk::Bool32 filterCubic; - vk::Bool32 filterCubicMinmax; - }; - static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct FilterCubicImageViewImageFormatPropertiesEXT : public layout::FilterCubicImageViewImageFormatPropertiesEXT - { - operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkFilterCubicImageViewImageFormatPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( filterCubic == rhs.filterCubic ) - && ( filterCubicMinmax == rhs.filterCubicMinmax ); - } - - bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::FilterCubicImageViewImageFormatPropertiesEXT::sType; - }; - static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct FormatProperties - { - operator VkFormatProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkFormatProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( FormatProperties const& rhs ) const - { - return ( linearTilingFeatures == rhs.linearTilingFeatures ) - && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) - && ( bufferFeatures == rhs.bufferFeatures ); - } - - bool operator!=( FormatProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::FormatFeatureFlags linearTilingFeatures; - vk::FormatFeatureFlags optimalTilingFeatures; - vk::FormatFeatureFlags bufferFeatures; - }; - static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct FormatProperties2 - { - protected: - FormatProperties2( vk::FormatProperties formatProperties_ = vk::FormatProperties() ) - : formatProperties( formatProperties_ ) - {} - - FormatProperties2( VkFormatProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - FormatProperties2& operator=( VkFormatProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eFormatProperties2; - void* pNext = nullptr; - vk::FormatProperties formatProperties; - }; - static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "layout struct and wrapper have different size!" ); - } - - struct FormatProperties2 : public layout::FormatProperties2 - { - operator VkFormatProperties2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkFormatProperties2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( FormatProperties2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( formatProperties == rhs.formatProperties ); - } - - bool operator!=( FormatProperties2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::FormatProperties2::sType; - }; - static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);

[Flattened diff hunk, all lines removed ("-"): this part of the patch drops the Vulkan-Hpp
(vulkan.hpp) framebuffer-related wrapper structs FramebufferAttachmentImageInfoKHR,
FramebufferAttachmentsCreateInfoKHR, FramebufferCreateInfo and
FramebufferMixedSamplesCombinationNV, together with their protected layout:: base structs.
Each wrapper consists of a value constructor, a constructor and assignment operator taking the
corresponding Vk C struct, chainable set*() members, conversion operators to the Vk C struct,
operator== / operator!=, and static_asserts that the wrapper matches the C struct in size and
is standard-layout.]
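
As a rough illustration of what the removed FramebufferCreateInfo wrapper is for, here is a
minimal usage sketch. The chained set*() calls are the ones defined by the removed wrapper;
the function name CreateFramebuffer and the device / renderPass / image-view parameters are
hypothetical stand-ins assumed to exist elsewhere.

// Hypothetical usage sketch (not taken from the patch): filling in the
// FramebufferCreateInfo wrapper through its chainable setters and creating
// a framebuffer with the C++ binding.
#include <array>
#include <vulkan/vulkan.hpp>

vk::Framebuffer CreateFramebuffer(vk::Device device, vk::RenderPass renderPass,
                                  vk::ImageView colorView, vk::ImageView depthView,
                                  uint32_t width, uint32_t height)
{
  const std::array<vk::ImageView, 2> attachments = {colorView, depthView};

  auto info = vk::FramebufferCreateInfo()
                  .setRenderPass(renderPass)
                  .setAttachmentCount(static_cast<uint32_t>(attachments.size()))
                  .setPAttachments(attachments.data())
                  .setWidth(width)
                  .setHeight(height)
                  .setLayers(1);

  // The wrapper converts to VkFramebufferCreateInfo, so either the C++ or the C
  // entry point can consume it; the C++ one is used here.
  return device.createFramebuffer(info);
}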
[Flattened diff hunk, continued (all lines removed): the same hunk also drops the
vertex-input and fixed-function pipeline-state wrappers VertexInputBindingDescription,
VertexInputAttributeDescription, PipelineVertexInputStateCreateInfo,
PipelineInputAssemblyStateCreateInfo, PipelineTessellationStateCreateInfo, Viewport,
PipelineViewportStateCreateInfo, PipelineRasterizationStateCreateInfo,
PipelineMultisampleStateCreateInfo, StencilOpState, PipelineDepthStencilStateCreateInfo,
PipelineColorBlendAttachmentState, PipelineColorBlendStateCreateInfo,
PipelineDynamicStateCreateInfo, and the beginning of GraphicsPipelineCreateInfo (its layout::
base, value constructor and first set*() members; the hunk continues past this point). Each
follows the same pattern as above: protected layout:: base, Vk-struct constructor and
assignment, chainable setters, Vk conversion operators, comparison operators, and
size/standard-layout static_asserts.]
pDepthStencilState = pDepthStencilState_; - return *this; - } - - GraphicsPipelineCreateInfo & setPColorBlendState( const vk::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) - { - pColorBlendState = pColorBlendState_; - return *this; - } - - GraphicsPipelineCreateInfo & setPDynamicState( const vk::PipelineDynamicStateCreateInfo* pDynamicState_ ) - { - pDynamicState = pDynamicState_; - return *this; - } - - GraphicsPipelineCreateInfo & setLayout( vk::PipelineLayout layout_ ) - { - layout = layout_; - return *this; - } - - GraphicsPipelineCreateInfo & setRenderPass( vk::RenderPass renderPass_ ) - { - renderPass = renderPass_; - return *this; - } - - GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) - { - subpass = subpass_; - return *this; - } - - GraphicsPipelineCreateInfo & setBasePipelineHandle( vk::Pipeline basePipelineHandle_ ) - { - basePipelineHandle = basePipelineHandle_; - return *this; - } - - GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) - { - basePipelineIndex = basePipelineIndex_; - return *this; - } - - operator VkGraphicsPipelineCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkGraphicsPipelineCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( GraphicsPipelineCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( pVertexInputState == rhs.pVertexInputState ) - && ( pInputAssemblyState == rhs.pInputAssemblyState ) - && ( pTessellationState == rhs.pTessellationState ) - && ( pViewportState == rhs.pViewportState ) - && ( pRasterizationState == rhs.pRasterizationState ) - && ( pMultisampleState == rhs.pMultisampleState ) - && ( pDepthStencilState == rhs.pDepthStencilState ) - && ( pColorBlendState == rhs.pColorBlendState ) - && ( pDynamicState == rhs.pDynamicState ) - && ( layout == rhs.layout ) - && ( renderPass == rhs.renderPass ) - && ( subpass == rhs.subpass ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); - } - - bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::GraphicsPipelineCreateInfo::sType; - }; - static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct XYColorEXT - { - XYColorEXT( float x_ = 0, - float y_ = 0 ) - : x( x_ ) - , y( y_ ) - {} - - XYColorEXT( VkXYColorEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - XYColorEXT& operator=( VkXYColorEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - XYColorEXT & setX( float x_ ) - { - x = x_; - return *this; - } - - XYColorEXT & setY( float y_ ) - { - y = y_; - return *this; - } - - operator VkXYColorEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkXYColorEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( XYColorEXT const& rhs ) const - { - return ( x == rhs.x ) - && ( y == rhs.y ); - } - - bool operator!=( XYColorEXT const& rhs ) const - { - return !operator==( rhs ); - } - - public: - float x; - float y; - }; - static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct HdrMetadataEXT - { - protected: - HdrMetadataEXT( vk::XYColorEXT displayPrimaryRed_ = vk::XYColorEXT(), - vk::XYColorEXT displayPrimaryGreen_ = vk::XYColorEXT(), - vk::XYColorEXT displayPrimaryBlue_ = vk::XYColorEXT(), - vk::XYColorEXT whitePoint_ = vk::XYColorEXT(), - float maxLuminance_ = 0, - float minLuminance_ = 0, - float maxContentLightLevel_ = 0, - float maxFrameAverageLightLevel_ = 0 ) - : displayPrimaryRed( displayPrimaryRed_ ) - , displayPrimaryGreen( displayPrimaryGreen_ ) - , displayPrimaryBlue( displayPrimaryBlue_ ) - , whitePoint( whitePoint_ ) - , maxLuminance( maxLuminance_ ) - , minLuminance( minLuminance_ ) - , maxContentLightLevel( maxContentLightLevel_ ) - , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) - {} - - HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eHdrMetadataEXT; - const void* pNext = nullptr; - vk::XYColorEXT displayPrimaryRed; - vk::XYColorEXT displayPrimaryGreen; - vk::XYColorEXT displayPrimaryBlue; - vk::XYColorEXT whitePoint; - float maxLuminance; - float minLuminance; - float maxContentLightLevel; - float maxFrameAverageLightLevel; - }; - static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "layout struct and wrapper have different size!" ); - } - - struct HdrMetadataEXT : public layout::HdrMetadataEXT - { - HdrMetadataEXT( vk::XYColorEXT displayPrimaryRed_ = vk::XYColorEXT(), - vk::XYColorEXT displayPrimaryGreen_ = vk::XYColorEXT(), - vk::XYColorEXT displayPrimaryBlue_ = vk::XYColorEXT(), - vk::XYColorEXT whitePoint_ = vk::XYColorEXT(), - float maxLuminance_ = 0, - float minLuminance_ = 0, - float maxContentLightLevel_ = 0, - float maxFrameAverageLightLevel_ = 0 ) - : layout::HdrMetadataEXT( displayPrimaryRed_, displayPrimaryGreen_, displayPrimaryBlue_, whitePoint_, maxLuminance_, minLuminance_, maxContentLightLevel_, maxFrameAverageLightLevel_ ) - {} - - HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) - : layout::HdrMetadataEXT( rhs ) - {} - - HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - HdrMetadataEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryRed( vk::XYColorEXT displayPrimaryRed_ ) - { - displayPrimaryRed = displayPrimaryRed_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryGreen( vk::XYColorEXT displayPrimaryGreen_ ) - { - displayPrimaryGreen = displayPrimaryGreen_; - return *this; - } - - HdrMetadataEXT & setDisplayPrimaryBlue( vk::XYColorEXT displayPrimaryBlue_ ) - { - displayPrimaryBlue = displayPrimaryBlue_; - return *this; - } - - HdrMetadataEXT & setWhitePoint( vk::XYColorEXT whitePoint_ ) - { - whitePoint = whitePoint_; - return *this; - } - - HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) - { - maxLuminance = maxLuminance_; - return *this; - } - - HdrMetadataEXT & setMinLuminance( float minLuminance_ ) - { - minLuminance = minLuminance_; - return *this; - } - - HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) - { - maxContentLightLevel = maxContentLightLevel_; - return *this; - } - - HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) - { - 
maxFrameAverageLightLevel = maxFrameAverageLightLevel_; - return *this; - } - - operator VkHdrMetadataEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkHdrMetadataEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( HdrMetadataEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( displayPrimaryRed == rhs.displayPrimaryRed ) - && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) - && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) - && ( whitePoint == rhs.whitePoint ) - && ( maxLuminance == rhs.maxLuminance ) - && ( minLuminance == rhs.minLuminance ) - && ( maxContentLightLevel == rhs.maxContentLightLevel ) - && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); - } - - bool operator!=( HdrMetadataEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::HdrMetadataEXT::sType; - }; - static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct HeadlessSurfaceCreateInfoEXT - { - protected: - HeadlessSurfaceCreateInfoEXT( vk::HeadlessSurfaceCreateFlagsEXT flags_ = vk::HeadlessSurfaceCreateFlagsEXT() ) - : flags( flags_ ) - {} - - HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; - const void* pNext = nullptr; - vk::HeadlessSurfaceCreateFlagsEXT flags; - }; - static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct HeadlessSurfaceCreateInfoEXT : public layout::HeadlessSurfaceCreateInfoEXT - { - HeadlessSurfaceCreateInfoEXT( vk::HeadlessSurfaceCreateFlagsEXT flags_ = vk::HeadlessSurfaceCreateFlagsEXT() ) - : layout::HeadlessSurfaceCreateInfoEXT( flags_ ) - {} - - HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) - : layout::HeadlessSurfaceCreateInfoEXT( rhs ) - {} - - HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - HeadlessSurfaceCreateInfoEXT & setFlags( vk::HeadlessSurfaceCreateFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - operator VkHeadlessSurfaceCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkHeadlessSurfaceCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::HeadlessSurfaceCreateInfoEXT::sType; - }; - static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_IOS_MVK - - namespace layout - { - struct IOSSurfaceCreateInfoMVK - { - protected: - IOSSurfaceCreateInfoMVK( vk::IOSSurfaceCreateFlagsMVK flags_ = vk::IOSSurfaceCreateFlagsMVK(), - const void* pView_ = nullptr ) - : flags( flags_ ) - , pView( pView_ ) - {} - - IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; - const void* pNext = nullptr; - vk::IOSSurfaceCreateFlagsMVK flags; - const void* pView; - }; - static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "layout struct and wrapper have different size!" ); - } - - struct IOSSurfaceCreateInfoMVK : public layout::IOSSurfaceCreateInfoMVK - { - IOSSurfaceCreateInfoMVK( vk::IOSSurfaceCreateFlagsMVK flags_ = vk::IOSSurfaceCreateFlagsMVK(), - const void* pView_ = nullptr ) - : layout::IOSSurfaceCreateInfoMVK( flags_, pView_ ) - {} - - IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) - : layout::IOSSurfaceCreateInfoMVK( rhs ) - {} - - IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - IOSSurfaceCreateInfoMVK & setFlags( vk::IOSSurfaceCreateFlagsMVK flags_ ) - { - flags = flags_; - return *this; - } - - IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) - { - pView = pView_; - return *this; - } - - operator VkIOSSurfaceCreateInfoMVK const&() const - { - return *reinterpret_cast( this ); - } - - operator VkIOSSurfaceCreateInfoMVK &() - { - return *reinterpret_cast( this ); - } - - bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); - } - - bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::IOSSurfaceCreateInfoMVK::sType; - }; - static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-
-  struct ImageBlit
-  {
-    ImageBlit( vk::ImageSubresourceLayers srcSubresource_ = vk::ImageSubresourceLayers(),
-               std::array<vk::Offset3D,2> const& srcOffsets_ = { { vk::Offset3D() } },
-               vk::ImageSubresourceLayers dstSubresource_ = vk::ImageSubresourceLayers(),
-               std::array<vk::Offset3D,2> const& dstOffsets_ = { { vk::Offset3D() } } )
-      : srcSubresource( srcSubresource_ )
-      , dstSubresource( dstSubresource_ )
-    {
-      memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( vk::Offset3D ) );
-      memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( vk::Offset3D ) );
-
-    }
-
-    ImageBlit( VkImageBlit const & rhs )
-    {
-      *reinterpret_cast<VkImageBlit*>(this) = rhs;
-    }
-
-    ImageBlit& operator=( VkImageBlit const & rhs )
-    {
-      *reinterpret_cast<VkImageBlit*>(this) = rhs;
-      return *this;
-    }
-
-    ImageBlit & setSrcSubresource( vk::ImageSubresourceLayers srcSubresource_ )
-    {
-      srcSubresource = srcSubresource_;
-      return *this;
-    }
-
-    ImageBlit & setSrcOffsets( std::array<vk::Offset3D,2> srcOffsets_ )
-    {
-      memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( vk::Offset3D ) );
-      return *this;
-    }
-
-    ImageBlit & setDstSubresource( vk::ImageSubresourceLayers dstSubresource_ )
-    {
-      dstSubresource = dstSubresource_;
-      return *this;
-    }
-
-    ImageBlit & setDstOffsets( std::array<vk::Offset3D,2> dstOffsets_ )
-    {
-      memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( vk::Offset3D ) );
-      return *this;
-    }
-
-    operator VkImageBlit const&() const
-    {
-      return *reinterpret_cast<const VkImageBlit*>( this );
-    }
-
-    operator VkImageBlit &()
-    {
-      return *reinterpret_cast<VkImageBlit*>( this );
-    }
-
-    bool operator==( ImageBlit const& rhs ) const
-    {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( vk::Offset3D ) ) == 0 )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( vk::Offset3D ) ) == 0 );
-    }
-
-    bool operator!=( ImageBlit const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::ImageSubresourceLayers srcSubresource;
-    vk::Offset3D srcOffsets[2];
-    vk::ImageSubresourceLayers dstSubresource;
-    vk::Offset3D dstOffsets[2];
-  };
-  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!"
);
-
-  struct ImageCopy
-  {
-    ImageCopy( vk::ImageSubresourceLayers srcSubresource_ = vk::ImageSubresourceLayers(),
-               vk::Offset3D srcOffset_ = vk::Offset3D(),
-               vk::ImageSubresourceLayers dstSubresource_ = vk::ImageSubresourceLayers(),
-               vk::Offset3D dstOffset_ = vk::Offset3D(),
-               vk::Extent3D extent_ = vk::Extent3D() )
-      : srcSubresource( srcSubresource_ )
-      , srcOffset( srcOffset_ )
-      , dstSubresource( dstSubresource_ )
-      , dstOffset( dstOffset_ )
-      , extent( extent_ )
-    {}
-
-    ImageCopy( VkImageCopy const & rhs )
-    {
-      *reinterpret_cast<VkImageCopy*>(this) = rhs;
-    }
-
-    ImageCopy& operator=( VkImageCopy const & rhs )
-    {
-      *reinterpret_cast<VkImageCopy*>(this) = rhs;
-      return *this;
-    }
-
-    ImageCopy & setSrcSubresource( vk::ImageSubresourceLayers srcSubresource_ )
-    {
-      srcSubresource = srcSubresource_;
-      return *this;
-    }
-
-    ImageCopy & setSrcOffset( vk::Offset3D srcOffset_ )
-    {
-      srcOffset = srcOffset_;
-      return *this;
-    }
-
-    ImageCopy & setDstSubresource( vk::ImageSubresourceLayers dstSubresource_ )
-    {
-      dstSubresource = dstSubresource_;
-      return *this;
-    }
-
-    ImageCopy & setDstOffset( vk::Offset3D dstOffset_ )
-    {
-      dstOffset = dstOffset_;
-      return *this;
-    }
-
-    ImageCopy & setExtent( vk::Extent3D extent_ )
-    {
-      extent = extent_;
-      return *this;
-    }
-
-    operator VkImageCopy const&() const
-    {
-      return *reinterpret_cast<const VkImageCopy*>( this );
-    }
-
-    operator VkImageCopy &()
-    {
-      return *reinterpret_cast<VkImageCopy*>( this );
-    }
-
-    bool operator==( ImageCopy const& rhs ) const
-    {
-      return ( srcSubresource == rhs.srcSubresource )
-          && ( srcOffset == rhs.srcOffset )
-          && ( dstSubresource == rhs.dstSubresource )
-          && ( dstOffset == rhs.dstOffset )
-          && ( extent == rhs.extent );
-    }
-
-    bool operator!=( ImageCopy const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::ImageSubresourceLayers srcSubresource;
-    vk::Offset3D srcOffset;
-    vk::ImageSubresourceLayers dstSubresource;
-    vk::Offset3D dstOffset;
-    vk::Extent3D extent;
-  };
-  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct ImageCreateInfo - { - protected: - ImageCreateInfo( vk::ImageCreateFlags flags_ = vk::ImageCreateFlags(), - vk::ImageType imageType_ = vk::ImageType::e1D, - vk::Format format_ = vk::Format::eUndefined, - vk::Extent3D extent_ = vk::Extent3D(), - uint32_t mipLevels_ = 0, - uint32_t arrayLayers_ = 0, - vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1, - vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal, - vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(), - vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr, - vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined ) - : flags( flags_ ) - , imageType( imageType_ ) - , format( format_ ) - , extent( extent_ ) - , mipLevels( mipLevels_ ) - , arrayLayers( arrayLayers_ ) - , samples( samples_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , initialLayout( initialLayout_ ) - {} - - ImageCreateInfo( VkImageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageCreateInfo; - const void* pNext = nullptr; - vk::ImageCreateFlags flags; - vk::ImageType imageType; - vk::Format format; - vk::Extent3D extent; - uint32_t mipLevels; - uint32_t arrayLayers; - vk::SampleCountFlagBits samples; - vk::ImageTiling tiling; - vk::ImageUsageFlags usage; - vk::SharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - vk::ImageLayout initialLayout; - }; - static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ImageCreateInfo : public layout::ImageCreateInfo - { - ImageCreateInfo( vk::ImageCreateFlags flags_ = vk::ImageCreateFlags(), - vk::ImageType imageType_ = vk::ImageType::e1D, - vk::Format format_ = vk::Format::eUndefined, - vk::Extent3D extent_ = vk::Extent3D(), - uint32_t mipLevels_ = 0, - uint32_t arrayLayers_ = 0, - vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1, - vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal, - vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(), - vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr, - vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined ) - : layout::ImageCreateInfo( flags_, imageType_, format_, extent_, mipLevels_, arrayLayers_, samples_, tiling_, usage_, sharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_, initialLayout_ ) - {} - - ImageCreateInfo( VkImageCreateInfo const & rhs ) - : layout::ImageCreateInfo( rhs ) - {} - - ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageCreateInfo & setFlags( vk::ImageCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - ImageCreateInfo & setImageType( vk::ImageType imageType_ ) - { - imageType = imageType_; - return *this; - } - - ImageCreateInfo & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - ImageCreateInfo & setExtent( vk::Extent3D extent_ ) - { - extent = extent_; - return *this; - } - - ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) - { - mipLevels = mipLevels_; - return *this; - } - - ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) - { - arrayLayers = arrayLayers_; - return *this; - } - - ImageCreateInfo & setSamples( vk::SampleCountFlagBits samples_ ) - { - samples = samples_; - return *this; - } - - ImageCreateInfo & setTiling( vk::ImageTiling tiling_ ) - { - tiling = tiling_; - return *this; - } - - ImageCreateInfo & setUsage( vk::ImageUsageFlags usage_ ) - { - usage = usage_; - return *this; - } - - ImageCreateInfo & setSharingMode( vk::SharingMode sharingMode_ ) - { - sharingMode = sharingMode_; - return *this; - } - - ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - ImageCreateInfo & setInitialLayout( vk::ImageLayout initialLayout_ ) - { - initialLayout = initialLayout_; - return *this; - } - - operator VkImageCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( imageType == rhs.imageType ) - && ( format == rhs.format ) - && ( extent == rhs.extent ) - && ( mipLevels == rhs.mipLevels ) - && ( arrayLayers == rhs.arrayLayers ) - && ( samples == rhs.samples ) - && ( tiling == rhs.tiling ) - && ( usage == rhs.usage ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) - && ( initialLayout == rhs.initialLayout ); - } - - 
bool operator!=( ImageCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageCreateInfo::sType; - }; - static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SubresourceLayout - { - operator VkSubresourceLayout const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubresourceLayout &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubresourceLayout const& rhs ) const - { - return ( offset == rhs.offset ) - && ( size == rhs.size ) - && ( rowPitch == rhs.rowPitch ) - && ( arrayPitch == rhs.arrayPitch ) - && ( depthPitch == rhs.depthPitch ); - } - - bool operator!=( SubresourceLayout const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DeviceSize offset; - vk::DeviceSize size; - vk::DeviceSize rowPitch; - vk::DeviceSize arrayPitch; - vk::DeviceSize depthPitch; - }; - static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageDrmFormatModifierExplicitCreateInfoEXT - { - protected: - ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0, - uint32_t drmFormatModifierPlaneCount_ = 0, - const vk::SubresourceLayout* pPlaneLayouts_ = nullptr ) - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , pPlaneLayouts( pPlaneLayouts_ ) - {} - - ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; - const void* pNext = nullptr; - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - const vk::SubresourceLayout* pPlaneLayouts; - }; - static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct ImageDrmFormatModifierExplicitCreateInfoEXT : public layout::ImageDrmFormatModifierExplicitCreateInfoEXT - { - ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0, - uint32_t drmFormatModifierPlaneCount_ = 0, - const vk::SubresourceLayout* pPlaneLayouts_ = nullptr ) - : layout::ImageDrmFormatModifierExplicitCreateInfoEXT( drmFormatModifier_, drmFormatModifierPlaneCount_, pPlaneLayouts_ ) - {} - - ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) - : layout::ImageDrmFormatModifierExplicitCreateInfoEXT( rhs ) - {} - - ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) - { - drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; - return *this; - } - - ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const vk::SubresourceLayout* pPlaneLayouts_ ) - { - pPlaneLayouts = pPlaneLayouts_; - return *this; - } - - operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) - && ( pPlaneLayouts == rhs.pPlaneLayouts ); - } - - bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageDrmFormatModifierExplicitCreateInfoEXT::sType; - }; - static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageDrmFormatModifierListCreateInfoEXT - { - protected: - ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0, - const uint64_t* pDrmFormatModifiers_ = nullptr ) - : drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifiers( pDrmFormatModifiers_ ) - {} - - ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; - const void* pNext = nullptr; - uint32_t drmFormatModifierCount; - const uint64_t* pDrmFormatModifiers; - }; - static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct ImageDrmFormatModifierListCreateInfoEXT : public layout::ImageDrmFormatModifierListCreateInfoEXT - { - ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0, - const uint64_t* pDrmFormatModifiers_ = nullptr ) - : layout::ImageDrmFormatModifierListCreateInfoEXT( drmFormatModifierCount_, pDrmFormatModifiers_ ) - {} - - ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) - : layout::ImageDrmFormatModifierListCreateInfoEXT( rhs ) - {} - - ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) - { - drmFormatModifierCount = drmFormatModifierCount_; - return *this; - } - - ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) - { - pDrmFormatModifiers = pDrmFormatModifiers_; - return *this; - } - - operator VkImageDrmFormatModifierListCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierListCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) - && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); - } - - bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageDrmFormatModifierListCreateInfoEXT::sType; - }; - static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageDrmFormatModifierPropertiesEXT - { - protected: - ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = 0 ) - : drmFormatModifier( drmFormatModifier_ ) - {} - - ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; - void* pNext = nullptr; - uint64_t drmFormatModifier; - }; - static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct ImageDrmFormatModifierPropertiesEXT : public layout::ImageDrmFormatModifierPropertiesEXT - { - operator VkImageDrmFormatModifierPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageDrmFormatModifierPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ); - } - - bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageDrmFormatModifierPropertiesEXT::sType; - }; - static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageFormatListCreateInfoKHR - { - protected: - ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0, - const vk::Format* pViewFormats_ = nullptr ) - : viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) - {} - - ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageFormatListCreateInfoKHR; - const void* pNext = nullptr; - uint32_t viewFormatCount; - const vk::Format* pViewFormats; - }; - static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct ImageFormatListCreateInfoKHR : public layout::ImageFormatListCreateInfoKHR - { - ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0, - const vk::Format* pViewFormats_ = nullptr ) - : layout::ImageFormatListCreateInfoKHR( viewFormatCount_, pViewFormats_ ) - {} - - ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) - : layout::ImageFormatListCreateInfoKHR( rhs ) - {} - - ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageFormatListCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageFormatListCreateInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) - { - viewFormatCount = viewFormatCount_; - return *this; - } - - ImageFormatListCreateInfoKHR & setPViewFormats( const vk::Format* pViewFormats_ ) - { - pViewFormats = pViewFormats_; - return *this; - } - - operator VkImageFormatListCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatListCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( viewFormatCount == rhs.viewFormatCount ) - && ( pViewFormats == rhs.pViewFormats ); - } - - bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageFormatListCreateInfoKHR::sType; - }; - static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageFormatProperties2 - { - protected: - ImageFormatProperties2( vk::ImageFormatProperties imageFormatProperties_ = vk::ImageFormatProperties() ) - : imageFormatProperties( imageFormatProperties_ ) - {} - - ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageFormatProperties2; - void* pNext = nullptr; - vk::ImageFormatProperties imageFormatProperties; - }; - static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "layout struct and wrapper have different size!" ); - } - - struct ImageFormatProperties2 : public layout::ImageFormatProperties2 - { - operator VkImageFormatProperties2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageFormatProperties2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageFormatProperties2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageFormatProperties == rhs.imageFormatProperties ); - } - - bool operator!=( ImageFormatProperties2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageFormatProperties2::sType; - }; - static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ImageSubresourceRange - { - ImageSubresourceRange( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(), - uint32_t baseMipLevel_ = 0, - uint32_t levelCount_ = 0, - uint32_t baseArrayLayer_ = 0, - uint32_t layerCount_ = 0 ) - : aspectMask( aspectMask_ ) - , baseMipLevel( baseMipLevel_ ) - , levelCount( levelCount_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - {} - - ImageSubresourceRange( VkImageSubresourceRange const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageSubresourceRange & setAspectMask( vk::ImageAspectFlags aspectMask_ ) - { - aspectMask = aspectMask_; - return *this; - } - - ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) - { - baseMipLevel = baseMipLevel_; - return *this; - } - - ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) - { - levelCount = levelCount_; - return *this; - } - - ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) - { - baseArrayLayer = baseArrayLayer_; - return *this; - } - - ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) - { - layerCount = layerCount_; - return *this; - } - - operator VkImageSubresourceRange const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresourceRange &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSubresourceRange const& rhs ) const - { - return ( aspectMask == rhs.aspectMask ) - && ( baseMipLevel == rhs.baseMipLevel ) - && ( levelCount == rhs.levelCount ) - && ( baseArrayLayer == rhs.baseArrayLayer ) - && ( layerCount == rhs.layerCount ); - } - - bool operator!=( ImageSubresourceRange const& rhs ) const - { - return !operator==( rhs ); 
- } - - public: - vk::ImageAspectFlags aspectMask; - uint32_t baseMipLevel; - uint32_t levelCount; - uint32_t baseArrayLayer; - uint32_t layerCount; - }; - static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageMemoryBarrier - { - protected: - ImageMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - vk::ImageLayout oldLayout_ = vk::ImageLayout::eUndefined, - vk::ImageLayout newLayout_ = vk::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = 0, - uint32_t dstQueueFamilyIndex_ = 0, - vk::Image image_ = vk::Image(), - vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) - {} - - ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageMemoryBarrier; - const void* pNext = nullptr; - vk::AccessFlags srcAccessMask; - vk::AccessFlags dstAccessMask; - vk::ImageLayout oldLayout; - vk::ImageLayout newLayout; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - vk::Image image; - vk::ImageSubresourceRange subresourceRange; - }; - static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "layout struct and wrapper have different size!" 
); - } - - struct ImageMemoryBarrier : public layout::ImageMemoryBarrier - { - ImageMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - vk::ImageLayout oldLayout_ = vk::ImageLayout::eUndefined, - vk::ImageLayout newLayout_ = vk::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = 0, - uint32_t dstQueueFamilyIndex_ = 0, - vk::Image image_ = vk::Image(), - vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) - : layout::ImageMemoryBarrier( srcAccessMask_, dstAccessMask_, oldLayout_, newLayout_, srcQueueFamilyIndex_, dstQueueFamilyIndex_, image_, subresourceRange_ ) - {} - - ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) - : layout::ImageMemoryBarrier( rhs ) - {} - - ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageMemoryBarrier & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageMemoryBarrier & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) - { - srcAccessMask = srcAccessMask_; - return *this; - } - - ImageMemoryBarrier & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) - { - dstAccessMask = dstAccessMask_; - return *this; - } - - ImageMemoryBarrier & setOldLayout( vk::ImageLayout oldLayout_ ) - { - oldLayout = oldLayout_; - return *this; - } - - ImageMemoryBarrier & setNewLayout( vk::ImageLayout newLayout_ ) - { - newLayout = newLayout_; - return *this; - } - - ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) - { - srcQueueFamilyIndex = srcQueueFamilyIndex_; - return *this; - } - - ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) - { - dstQueueFamilyIndex = dstQueueFamilyIndex_; - return *this; - } - - ImageMemoryBarrier & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - ImageMemoryBarrier & setSubresourceRange( vk::ImageSubresourceRange subresourceRange_ ) - { - subresourceRange = subresourceRange_; - return *this; - } - - operator VkImageMemoryBarrier const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageMemoryBarrier &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageMemoryBarrier const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( oldLayout == rhs.oldLayout ) - && ( newLayout == rhs.newLayout ) - && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) - && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) - && ( image == rhs.image ) - && ( subresourceRange == rhs.subresourceRange ); - } - - bool operator!=( ImageMemoryBarrier const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageMemoryBarrier::sType; - }; - static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct ImageMemoryRequirementsInfo2 - { - protected: - ImageMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) - : image( image_ ) - {} - - ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; - const void* pNext = nullptr; - vk::Image image; - }; - static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "layout struct and wrapper have different size!" ); - } - - struct ImageMemoryRequirementsInfo2 : public layout::ImageMemoryRequirementsInfo2 - { - ImageMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) - : layout::ImageMemoryRequirementsInfo2( image_ ) - {} - - ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) - : layout::ImageMemoryRequirementsInfo2( rhs ) - {} - - ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageMemoryRequirementsInfo2 & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - operator VkImageMemoryRequirementsInfo2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageMemoryRequirementsInfo2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); - } - - bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageMemoryRequirementsInfo2::sType; - }; - static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_FUCHSIA - - namespace layout - { - struct ImagePipeSurfaceCreateInfoFUCHSIA - { - protected: - ImagePipeSurfaceCreateInfoFUCHSIA( vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = vk::ImagePipeSurfaceCreateFlagsFUCHSIA(), - zx_handle_t imagePipeHandle_ = 0 ) - : flags( flags_ ) - , imagePipeHandle( imagePipeHandle_ ) - {} - - ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; - const void* pNext = nullptr; - vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags; - zx_handle_t imagePipeHandle; - }; - static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "layout struct and wrapper have different size!" 
); - } - - struct ImagePipeSurfaceCreateInfoFUCHSIA : public layout::ImagePipeSurfaceCreateInfoFUCHSIA - { - ImagePipeSurfaceCreateInfoFUCHSIA( vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = vk::ImagePipeSurfaceCreateFlagsFUCHSIA(), - zx_handle_t imagePipeHandle_ = 0 ) - : layout::ImagePipeSurfaceCreateInfoFUCHSIA( flags_, imagePipeHandle_ ) - {} - - ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) - : layout::ImagePipeSurfaceCreateInfoFUCHSIA( rhs ) - {} - - ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) - { - flags = flags_; - return *this; - } - - ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) - { - imagePipeHandle = imagePipeHandle_; - return *this; - } - - operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImagePipeSurfaceCreateInfoFUCHSIA &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( imagePipeHandle == rhs.imagePipeHandle ); - } - - bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImagePipeSurfaceCreateInfoFUCHSIA::sType; - }; - static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - namespace layout - { - struct ImagePlaneMemoryRequirementsInfo - { - protected: - ImagePlaneMemoryRequirementsInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) - : planeAspect( planeAspect_ ) - {} - - ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; - const void* pNext = nullptr; - vk::ImageAspectFlagBits planeAspect; - }; - static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ImagePlaneMemoryRequirementsInfo : public layout::ImagePlaneMemoryRequirementsInfo - { - ImagePlaneMemoryRequirementsInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) - : layout::ImagePlaneMemoryRequirementsInfo( planeAspect_ ) - {} - - ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) - : layout::ImagePlaneMemoryRequirementsInfo( rhs ) - {} - - ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImagePlaneMemoryRequirementsInfo & setPlaneAspect( vk::ImageAspectFlagBits planeAspect_ ) - { - planeAspect = planeAspect_; - return *this; - } - - operator VkImagePlaneMemoryRequirementsInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImagePlaneMemoryRequirementsInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( planeAspect == rhs.planeAspect ); - } - - bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImagePlaneMemoryRequirementsInfo::sType; - }; - static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ImageResolve - { - ImageResolve( vk::ImageSubresourceLayers srcSubresource_ = vk::ImageSubresourceLayers(), - vk::Offset3D srcOffset_ = vk::Offset3D(), - vk::ImageSubresourceLayers dstSubresource_ = vk::ImageSubresourceLayers(), - vk::Offset3D dstOffset_ = vk::Offset3D(), - vk::Extent3D extent_ = vk::Extent3D() ) - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - {} - - ImageResolve( VkImageResolve const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageResolve& operator=( VkImageResolve const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageResolve & setSrcSubresource( vk::ImageSubresourceLayers srcSubresource_ ) - { - srcSubresource = srcSubresource_; - return *this; - } - - ImageResolve & setSrcOffset( vk::Offset3D srcOffset_ ) - { - srcOffset = srcOffset_; - return *this; - } - - ImageResolve & setDstSubresource( vk::ImageSubresourceLayers dstSubresource_ ) - { - dstSubresource = dstSubresource_; - return *this; - } - - ImageResolve & setDstOffset( vk::Offset3D dstOffset_ ) - { - dstOffset = dstOffset_; - return *this; - } - - ImageResolve & setExtent( vk::Extent3D extent_ ) - { - extent = extent_; - return *this; - } - - operator VkImageResolve const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageResolve &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageResolve const& rhs ) const - { - return ( srcSubresource == rhs.srcSubresource ) - && ( srcOffset == rhs.srcOffset ) - && ( dstSubresource == rhs.dstSubresource ) - && ( dstOffset == rhs.dstOffset ) - && ( extent == rhs.extent ); - } - - bool operator!=( ImageResolve const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ImageSubresourceLayers srcSubresource; - vk::Offset3D srcOffset; - 
vk::ImageSubresourceLayers dstSubresource; - vk::Offset3D dstOffset; - vk::Extent3D extent; - }; - static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageSparseMemoryRequirementsInfo2 - { - protected: - ImageSparseMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) - : image( image_ ) - {} - - ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; - const void* pNext = nullptr; - vk::Image image; - }; - static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "layout struct and wrapper have different size!" ); - } - - struct ImageSparseMemoryRequirementsInfo2 : public layout::ImageSparseMemoryRequirementsInfo2 - { - ImageSparseMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) - : layout::ImageSparseMemoryRequirementsInfo2( image_ ) - {} - - ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) - : layout::ImageSparseMemoryRequirementsInfo2( rhs ) - {} - - ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageSparseMemoryRequirementsInfo2 & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - operator VkImageSparseMemoryRequirementsInfo2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageSparseMemoryRequirementsInfo2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ); - } - - bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageSparseMemoryRequirementsInfo2::sType; - }; - static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageStencilUsageCreateInfoEXT - { - protected: - ImageStencilUsageCreateInfoEXT( vk::ImageUsageFlags stencilUsage_ = vk::ImageUsageFlags() ) - : stencilUsage( stencilUsage_ ) - {} - - ImageStencilUsageCreateInfoEXT( VkImageStencilUsageCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageStencilUsageCreateInfoEXT& operator=( VkImageStencilUsageCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageStencilUsageCreateInfoEXT; - const void* pNext = nullptr; - vk::ImageUsageFlags stencilUsage; - }; - static_assert( sizeof( ImageStencilUsageCreateInfoEXT ) == sizeof( VkImageStencilUsageCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct ImageStencilUsageCreateInfoEXT : public layout::ImageStencilUsageCreateInfoEXT - { - ImageStencilUsageCreateInfoEXT( vk::ImageUsageFlags stencilUsage_ = vk::ImageUsageFlags() ) - : layout::ImageStencilUsageCreateInfoEXT( stencilUsage_ ) - {} - - ImageStencilUsageCreateInfoEXT( VkImageStencilUsageCreateInfoEXT const & rhs ) - : layout::ImageStencilUsageCreateInfoEXT( rhs ) - {} - - ImageStencilUsageCreateInfoEXT& operator=( VkImageStencilUsageCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageStencilUsageCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageStencilUsageCreateInfoEXT & setStencilUsage( vk::ImageUsageFlags stencilUsage_ ) - { - stencilUsage = stencilUsage_; - return *this; - } - - operator VkImageStencilUsageCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageStencilUsageCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageStencilUsageCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( stencilUsage == rhs.stencilUsage ); - } - - bool operator!=( ImageStencilUsageCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageStencilUsageCreateInfoEXT::sType; - }; - static_assert( sizeof( ImageStencilUsageCreateInfoEXT ) == sizeof( VkImageStencilUsageCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageSwapchainCreateInfoKHR - { - protected: - ImageSwapchainCreateInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR() ) - : swapchain( swapchain_ ) - {} - - ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; - const void* pNext = nullptr; - vk::SwapchainKHR swapchain; - }; - static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImageSwapchainCreateInfoKHR : public layout::ImageSwapchainCreateInfoKHR - { - ImageSwapchainCreateInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR() ) - : layout::ImageSwapchainCreateInfoKHR( swapchain_ ) - {} - - ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) - : layout::ImageSwapchainCreateInfoKHR( rhs ) - {} - - ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageSwapchainCreateInfoKHR & setSwapchain( vk::SwapchainKHR swapchain_ ) - { - swapchain = swapchain_; - return *this; - } - - operator VkImageSwapchainCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageSwapchainCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchain == rhs.swapchain ); - } - - bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageSwapchainCreateInfoKHR::sType; - }; - static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageViewASTCDecodeModeEXT - { - protected: - ImageViewASTCDecodeModeEXT( vk::Format decodeMode_ = vk::Format::eUndefined ) - : decodeMode( decodeMode_ ) - {} - - ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; - const void* pNext = nullptr; - vk::Format decodeMode; - }; - static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "layout struct and wrapper have different size!" 
); - } - - struct ImageViewASTCDecodeModeEXT : public layout::ImageViewASTCDecodeModeEXT - { - ImageViewASTCDecodeModeEXT( vk::Format decodeMode_ = vk::Format::eUndefined ) - : layout::ImageViewASTCDecodeModeEXT( decodeMode_ ) - {} - - ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) - : layout::ImageViewASTCDecodeModeEXT( rhs ) - {} - - ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageViewASTCDecodeModeEXT & setDecodeMode( vk::Format decodeMode_ ) - { - decodeMode = decodeMode_; - return *this; - } - - operator VkImageViewASTCDecodeModeEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageViewASTCDecodeModeEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( decodeMode == rhs.decodeMode ); - } - - bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageViewASTCDecodeModeEXT::sType; - }; - static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageViewCreateInfo - { - protected: - ImageViewCreateInfo( vk::ImageViewCreateFlags flags_ = vk::ImageViewCreateFlags(), - vk::Image image_ = vk::Image(), - vk::ImageViewType viewType_ = vk::ImageViewType::e1D, - vk::Format format_ = vk::Format::eUndefined, - vk::ComponentMapping components_ = vk::ComponentMapping(), - vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) - : flags( flags_ ) - , image( image_ ) - , viewType( viewType_ ) - , format( format_ ) - , components( components_ ) - , subresourceRange( subresourceRange_ ) - {} - - ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageViewCreateInfo; - const void* pNext = nullptr; - vk::ImageViewCreateFlags flags; - vk::Image image; - vk::ImageViewType viewType; - vk::Format format; - vk::ComponentMapping components; - vk::ImageSubresourceRange subresourceRange; - }; - static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ImageViewCreateInfo : public layout::ImageViewCreateInfo - { - ImageViewCreateInfo( vk::ImageViewCreateFlags flags_ = vk::ImageViewCreateFlags(), - vk::Image image_ = vk::Image(), - vk::ImageViewType viewType_ = vk::ImageViewType::e1D, - vk::Format format_ = vk::Format::eUndefined, - vk::ComponentMapping components_ = vk::ComponentMapping(), - vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) - : layout::ImageViewCreateInfo( flags_, image_, viewType_, format_, components_, subresourceRange_ ) - {} - - ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) - : layout::ImageViewCreateInfo( rhs ) - {} - - ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageViewCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageViewCreateInfo & setFlags( vk::ImageViewCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - ImageViewCreateInfo & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - ImageViewCreateInfo & setViewType( vk::ImageViewType viewType_ ) - { - viewType = viewType_; - return *this; - } - - ImageViewCreateInfo & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - ImageViewCreateInfo & setComponents( vk::ComponentMapping components_ ) - { - components = components_; - return *this; - } - - ImageViewCreateInfo & setSubresourceRange( vk::ImageSubresourceRange subresourceRange_ ) - { - subresourceRange = subresourceRange_; - return *this; - } - - operator VkImageViewCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageViewCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageViewCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( image == rhs.image ) - && ( viewType == rhs.viewType ) - && ( format == rhs.format ) - && ( components == rhs.components ) - && ( subresourceRange == rhs.subresourceRange ); - } - - bool operator!=( ImageViewCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageViewCreateInfo::sType; - }; - static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageViewHandleInfoNVX - { - protected: - ImageViewHandleInfoNVX( vk::ImageView imageView_ = vk::ImageView(), - vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler, - vk::Sampler sampler_ = vk::Sampler() ) - : imageView( imageView_ ) - , descriptorType( descriptorType_ ) - , sampler( sampler_ ) - {} - - ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageViewHandleInfoNVX; - const void* pNext = nullptr; - vk::ImageView imageView; - vk::DescriptorType descriptorType; - vk::Sampler sampler; - }; - static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "layout struct and wrapper have different size!" 
); - } - - struct ImageViewHandleInfoNVX : public layout::ImageViewHandleInfoNVX - { - ImageViewHandleInfoNVX( vk::ImageView imageView_ = vk::ImageView(), - vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler, - vk::Sampler sampler_ = vk::Sampler() ) - : layout::ImageViewHandleInfoNVX( imageView_, descriptorType_, sampler_ ) - {} - - ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) - : layout::ImageViewHandleInfoNVX( rhs ) - {} - - ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageViewHandleInfoNVX & setImageView( vk::ImageView imageView_ ) - { - imageView = imageView_; - return *this; - } - - ImageViewHandleInfoNVX & setDescriptorType( vk::DescriptorType descriptorType_ ) - { - descriptorType = descriptorType_; - return *this; - } - - ImageViewHandleInfoNVX & setSampler( vk::Sampler sampler_ ) - { - sampler = sampler_; - return *this; - } - - operator VkImageViewHandleInfoNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageViewHandleInfoNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageViewHandleInfoNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageView == rhs.imageView ) - && ( descriptorType == rhs.descriptorType ) - && ( sampler == rhs.sampler ); - } - - bool operator!=( ImageViewHandleInfoNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageViewHandleInfoNVX::sType; - }; - static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImageViewUsageCreateInfo - { - protected: - ImageViewUsageCreateInfo( vk::ImageUsageFlags usage_ = vk::ImageUsageFlags() ) - : usage( usage_ ) - {} - - ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImageViewUsageCreateInfo; - const void* pNext = nullptr; - vk::ImageUsageFlags usage; - }; - static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ImageViewUsageCreateInfo : public layout::ImageViewUsageCreateInfo - { - ImageViewUsageCreateInfo( vk::ImageUsageFlags usage_ = vk::ImageUsageFlags() ) - : layout::ImageViewUsageCreateInfo( usage_ ) - {} - - ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) - : layout::ImageViewUsageCreateInfo( rhs ) - {} - - ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImageViewUsageCreateInfo & setUsage( vk::ImageUsageFlags usage_ ) - { - usage = usage_; - return *this; - } - - operator VkImageViewUsageCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImageViewUsageCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImageViewUsageCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( usage == rhs.usage ); - } - - bool operator!=( ImageViewUsageCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImageViewUsageCreateInfo::sType; - }; - static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - - namespace layout - { - struct ImportAndroidHardwareBufferInfoANDROID - { - protected: - ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) - : buffer( buffer_ ) - {} - - ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; - const void* pNext = nullptr; - struct AHardwareBuffer* buffer; - }; - static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "layout struct and wrapper have different size!" 
); - } - - struct ImportAndroidHardwareBufferInfoANDROID : public layout::ImportAndroidHardwareBufferInfoANDROID - { - ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) - : layout::ImportAndroidHardwareBufferInfoANDROID( buffer_ ) - {} - - ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) - : layout::ImportAndroidHardwareBufferInfoANDROID( rhs ) - {} - - ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) - { - buffer = buffer_; - return *this; - } - - operator VkImportAndroidHardwareBufferInfoANDROID const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportAndroidHardwareBufferInfoANDROID &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportAndroidHardwareBufferInfoANDROID::sType; - }; - static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - namespace layout - { - struct ImportFenceFdInfoKHR - { - protected: - ImportFenceFdInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::FenceImportFlags flags_ = vk::FenceImportFlags(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - int fd_ = 0 ) - : fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) - {} - - ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportFenceFdInfoKHR; - const void* pNext = nullptr; - vk::Fence fence; - vk::FenceImportFlags flags; - vk::ExternalFenceHandleTypeFlagBits handleType; - int fd; - }; - static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImportFenceFdInfoKHR : public layout::ImportFenceFdInfoKHR - { - ImportFenceFdInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::FenceImportFlags flags_ = vk::FenceImportFlags(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - int fd_ = 0 ) - : layout::ImportFenceFdInfoKHR( fence_, flags_, handleType_, fd_ ) - {} - - ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) - : layout::ImportFenceFdInfoKHR( rhs ) - {} - - ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportFenceFdInfoKHR & setFence( vk::Fence fence_ ) - { - fence = fence_; - return *this; - } - - ImportFenceFdInfoKHR & setFlags( vk::FenceImportFlags flags_ ) - { - flags = flags_; - return *this; - } - - ImportFenceFdInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportFenceFdInfoKHR & setFd( int fd_ ) - { - fd = fd_; - return *this; - } - - operator VkImportFenceFdInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportFenceFdInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportFenceFdInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportFenceFdInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportFenceFdInfoKHR::sType; - }; - static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ImportFenceWin32HandleInfoKHR - { - protected: - ImportFenceWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::FenceImportFlags flags_ = vk::FenceImportFlags(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = 0, - LPCWSTR name_ = nullptr ) - : fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; - const void* pNext = nullptr; - vk::Fence fence; - vk::FenceImportFlags flags; - vk::ExternalFenceHandleTypeFlagBits handleType; - HANDLE handle; - LPCWSTR name; - }; - static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImportFenceWin32HandleInfoKHR : public layout::ImportFenceWin32HandleInfoKHR - { - ImportFenceWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(), - vk::FenceImportFlags flags_ = vk::FenceImportFlags(), - vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = 0, - LPCWSTR name_ = nullptr ) - : layout::ImportFenceWin32HandleInfoKHR( fence_, flags_, handleType_, handle_, name_ ) - {} - - ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) - : layout::ImportFenceWin32HandleInfoKHR( rhs ) - {} - - ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setFence( vk::Fence fence_ ) - { - fence = fence_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setFlags( vk::FenceImportFlags flags_ ) - { - flags = flags_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) - { - handle = handle_; - return *this; - } - - ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) - { - name = name_; - return *this; - } - - operator VkImportFenceWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportFenceWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fence == rhs.fence ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportFenceWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct ImportMemoryFdInfoKHR - { - protected: - ImportMemoryFdInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - int fd_ = 0 ) - : handleType( handleType_ ) - , fd( fd_ ) - {} - - ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportMemoryFdInfoKHR; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagBits handleType; - int fd; - }; - static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImportMemoryFdInfoKHR : public layout::ImportMemoryFdInfoKHR - { - ImportMemoryFdInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - int fd_ = 0 ) - : layout::ImportMemoryFdInfoKHR( handleType_, fd_ ) - {} - - ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) - : layout::ImportMemoryFdInfoKHR( rhs ) - {} - - ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportMemoryFdInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportMemoryFdInfoKHR & setFd( int fd_ ) - { - fd = fd_; - return *this; - } - - operator VkImportMemoryFdInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryFdInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryFdInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportMemoryFdInfoKHR::sType; - }; - static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ImportMemoryHostPointerInfoEXT - { - protected: - ImportMemoryHostPointerInfoEXT( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - void* pHostPointer_ = nullptr ) - : handleType( handleType_ ) - , pHostPointer( pHostPointer_ ) - {} - - ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagBits handleType; - void* pHostPointer; - }; - static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct ImportMemoryHostPointerInfoEXT : public layout::ImportMemoryHostPointerInfoEXT - { - ImportMemoryHostPointerInfoEXT( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - void* pHostPointer_ = nullptr ) - : layout::ImportMemoryHostPointerInfoEXT( handleType_, pHostPointer_ ) - {} - - ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) - : layout::ImportMemoryHostPointerInfoEXT( rhs ) - {} - - ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) - { - pHostPointer = pHostPointer_; - return *this; - } - - operator VkImportMemoryHostPointerInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryHostPointerInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( pHostPointer == rhs.pHostPointer ); - } - - bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportMemoryHostPointerInfoEXT::sType; - }; - static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ImportMemoryWin32HandleInfoKHR - { - protected: - ImportMemoryWin32HandleInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = 0, - LPCWSTR name_ = nullptr ) - : handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagBits handleType; - HANDLE handle; - LPCWSTR name; - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImportMemoryWin32HandleInfoKHR : public layout::ImportMemoryWin32HandleInfoKHR - { - ImportMemoryWin32HandleInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = 0, - LPCWSTR name_ = nullptr ) - : layout::ImportMemoryWin32HandleInfoKHR( handleType_, handle_, name_ ) - {} - - ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) - : layout::ImportMemoryWin32HandleInfoKHR( rhs ) - {} - - ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) - { - handle = handle_; - return *this; - } - - ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) - { - name = name_; - return *this; - } - - operator VkImportMemoryWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportMemoryWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ImportMemoryWin32HandleInfoNV - { - protected: - ImportMemoryWin32HandleInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleType_ = vk::ExternalMemoryHandleTypeFlagsNV(), - HANDLE handle_ = 0 ) - : handleType( handleType_ ) - , handle( handle_ ) - {} - - ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagsNV handleType; - HANDLE handle; - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct ImportMemoryWin32HandleInfoNV : public layout::ImportMemoryWin32HandleInfoNV - { - ImportMemoryWin32HandleInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleType_ = vk::ExternalMemoryHandleTypeFlagsNV(), - HANDLE handle_ = 0 ) - : layout::ImportMemoryWin32HandleInfoNV( handleType_, handle_ ) - {} - - ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) - : layout::ImportMemoryWin32HandleInfoNV( rhs ) - {} - - ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setHandleType( vk::ExternalMemoryHandleTypeFlagsNV handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) - { - handle = handle_; - return *this; - } - - operator VkImportMemoryWin32HandleInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ); - } - - bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportMemoryWin32HandleInfoNV::sType; - }; - static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct ImportSemaphoreFdInfoKHR - { - protected: - ImportSemaphoreFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - int fd_ = 0 ) - : semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) - {} - - ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; - const void* pNext = nullptr; - vk::Semaphore semaphore; - vk::SemaphoreImportFlags flags; - vk::ExternalSemaphoreHandleTypeFlagBits handleType; - int fd; - }; - static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImportSemaphoreFdInfoKHR : public layout::ImportSemaphoreFdInfoKHR - { - ImportSemaphoreFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - int fd_ = 0 ) - : layout::ImportSemaphoreFdInfoKHR( semaphore_, flags_, handleType_, fd_ ) - {} - - ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) - : layout::ImportSemaphoreFdInfoKHR( rhs ) - {} - - ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) - { - semaphore = semaphore_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setFlags( vk::SemaphoreImportFlags flags_ ) - { - flags = flags_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportSemaphoreFdInfoKHR & setFd( int fd_ ) - { - fd = fd_; - return *this; - } - - operator VkImportSemaphoreFdInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportSemaphoreFdInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( fd == rhs.fd ); - } - - bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportSemaphoreFdInfoKHR::sType; - }; - static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct ImportSemaphoreWin32HandleInfoKHR - { - protected: - ImportSemaphoreWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = 0, - LPCWSTR name_ = nullptr ) - : semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) - {} - - ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; - const void* pNext = nullptr; - vk::Semaphore semaphore; - vk::SemaphoreImportFlags flags; - vk::ExternalSemaphoreHandleTypeFlagBits handleType; - HANDLE handle; - LPCWSTR name; - }; - static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct ImportSemaphoreWin32HandleInfoKHR : public layout::ImportSemaphoreWin32HandleInfoKHR - { - ImportSemaphoreWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = 0, - LPCWSTR name_ = nullptr ) - : layout::ImportSemaphoreWin32HandleInfoKHR( semaphore_, flags_, handleType_, handle_, name_ ) - {} - - ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) - : layout::ImportSemaphoreWin32HandleInfoKHR( rhs ) - {} - - ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) - { - semaphore = semaphore_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setFlags( vk::SemaphoreImportFlags flags_ ) - { - flags = flags_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) - { - handle = handle_; - return *this; - } - - ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) - { - name = name_; - return *this; - } - - operator VkImportSemaphoreWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkImportSemaphoreWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( flags == rhs.flags ) - && ( handleType == rhs.handleType ) - && ( handle == rhs.handle ) - && ( name == rhs.name ); - } - - bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ImportSemaphoreWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct IndirectCommandsLayoutTokenNVX - { - IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX tokenType_ = vk::IndirectCommandsTokenTypeNVX::ePipeline, - uint32_t bindingUnit_ = 0, - uint32_t dynamicCount_ = 0, - uint32_t divisor_ = 0 ) - : tokenType( tokenType_ ) - , bindingUnit( bindingUnit_ ) - , dynamicCount( dynamicCount_ ) - , divisor( divisor_ ) - {} - - IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setTokenType( vk::IndirectCommandsTokenTypeNVX tokenType_ ) - { - tokenType = tokenType_; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setBindingUnit( uint32_t bindingUnit_ ) - { - bindingUnit = bindingUnit_; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setDynamicCount( uint32_t dynamicCount_ ) - { - dynamicCount = dynamicCount_; - return *this; - } - - IndirectCommandsLayoutTokenNVX & setDivisor( uint32_t divisor_ ) - { - divisor = divisor_; - return *this; - } - - operator VkIndirectCommandsLayoutTokenNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsLayoutTokenNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const - { - return ( tokenType == rhs.tokenType ) - && ( bindingUnit == rhs.bindingUnit ) - && ( dynamicCount == rhs.dynamicCount ) - && ( divisor == rhs.divisor ); - } - - bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::IndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; - }; - static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct IndirectCommandsLayoutCreateInfoNVX - { - protected: - IndirectCommandsLayoutCreateInfoNVX( vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - vk::IndirectCommandsLayoutUsageFlagsNVX flags_ = vk::IndirectCommandsLayoutUsageFlagsNVX(), - uint32_t tokenCount_ = 0, - const vk::IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) - : pipelineBindPoint( pipelineBindPoint_ ) - , flags( flags_ ) - , tokenCount( tokenCount_ ) - , pTokens( pTokens_ ) - {} - - IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX; - const void* pNext = nullptr; - vk::PipelineBindPoint pipelineBindPoint; - vk::IndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const vk::IndirectCommandsLayoutTokenNVX* pTokens; - }; - static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "layout struct and wrapper have different size!" 
); - } - - struct IndirectCommandsLayoutCreateInfoNVX : public layout::IndirectCommandsLayoutCreateInfoNVX - { - IndirectCommandsLayoutCreateInfoNVX( vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - vk::IndirectCommandsLayoutUsageFlagsNVX flags_ = vk::IndirectCommandsLayoutUsageFlagsNVX(), - uint32_t tokenCount_ = 0, - const vk::IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) - : layout::IndirectCommandsLayoutCreateInfoNVX( pipelineBindPoint_, flags_, tokenCount_, pTokens_ ) - {} - - IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) - : layout::IndirectCommandsLayoutCreateInfoNVX( rhs ) - {} - - IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setFlags( vk::IndirectCommandsLayoutUsageFlagsNVX flags_ ) - { - flags = flags_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setTokenCount( uint32_t tokenCount_ ) - { - tokenCount = tokenCount_; - return *this; - } - - IndirectCommandsLayoutCreateInfoNVX & setPTokens( const vk::IndirectCommandsLayoutTokenNVX* pTokens_ ) - { - pTokens = pTokens_; - return *this; - } - - operator VkIndirectCommandsLayoutCreateInfoNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkIndirectCommandsLayoutCreateInfoNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( flags == rhs.flags ) - && ( tokenCount == rhs.tokenCount ) - && ( pTokens == rhs.pTokens ); - } - - bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::IndirectCommandsLayoutCreateInfoNVX::sType; - }; - static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct InitializePerformanceApiInfoINTEL - { - protected: - InitializePerformanceApiInfoINTEL( void* pUserData_ = nullptr ) - : pUserData( pUserData_ ) - {} - - InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; - const void* pNext = nullptr; - void* pUserData; - }; - static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "layout struct and wrapper have different size!" 
); - } - - struct InitializePerformanceApiInfoINTEL : public layout::InitializePerformanceApiInfoINTEL - { - InitializePerformanceApiInfoINTEL( void* pUserData_ = nullptr ) - : layout::InitializePerformanceApiInfoINTEL( pUserData_ ) - {} - - InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) - : layout::InitializePerformanceApiInfoINTEL( rhs ) - {} - - InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) - { - pUserData = pUserData_; - return *this; - } - - operator VkInitializePerformanceApiInfoINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkInitializePerformanceApiInfoINTEL &() - { - return *reinterpret_cast( this ); - } - - bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pUserData == rhs.pUserData ); - } - - bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::InitializePerformanceApiInfoINTEL::sType; - }; - static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct InputAttachmentAspectReference - { - InputAttachmentAspectReference( uint32_t subpass_ = 0, - uint32_t inputAttachmentIndex_ = 0, - vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags() ) - : subpass( subpass_ ) - , inputAttachmentIndex( inputAttachmentIndex_ ) - , aspectMask( aspectMask_ ) - {} - - InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) - { - subpass = subpass_; - return *this; - } - - InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) - { - inputAttachmentIndex = inputAttachmentIndex_; - return *this; - } - - InputAttachmentAspectReference & setAspectMask( vk::ImageAspectFlags aspectMask_ ) - { - aspectMask = aspectMask_; - return *this; - } - - operator VkInputAttachmentAspectReference const&() const - { - return *reinterpret_cast( this ); - } - - operator VkInputAttachmentAspectReference &() - { - return *reinterpret_cast( this ); - } - - bool operator==( InputAttachmentAspectReference const& rhs ) const - { - return ( subpass == rhs.subpass ) - && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) - && ( aspectMask == rhs.aspectMask ); - } - - bool operator!=( InputAttachmentAspectReference const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t subpass; - uint32_t inputAttachmentIndex; - vk::ImageAspectFlags aspectMask; - }; - static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct InstanceCreateInfo - { - protected: - InstanceCreateInfo( vk::InstanceCreateFlags flags_ = vk::InstanceCreateFlags(), - const vk::ApplicationInfo* pApplicationInfo_ = nullptr, - uint32_t enabledLayerCount_ = 0, - const char* const* ppEnabledLayerNames_ = nullptr, - uint32_t enabledExtensionCount_ = 0, - const char* const* ppEnabledExtensionNames_ = nullptr ) - : flags( flags_ ) - , pApplicationInfo( pApplicationInfo_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) - {} - - InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eInstanceCreateInfo; - const void* pNext = nullptr; - vk::InstanceCreateFlags flags; - const vk::ApplicationInfo* pApplicationInfo; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; - }; - static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct InstanceCreateInfo : public layout::InstanceCreateInfo - { - InstanceCreateInfo( vk::InstanceCreateFlags flags_ = vk::InstanceCreateFlags(), - const vk::ApplicationInfo* pApplicationInfo_ = nullptr, - uint32_t enabledLayerCount_ = 0, - const char* const* ppEnabledLayerNames_ = nullptr, - uint32_t enabledExtensionCount_ = 0, - const char* const* ppEnabledExtensionNames_ = nullptr ) - : layout::InstanceCreateInfo( flags_, pApplicationInfo_, enabledLayerCount_, ppEnabledLayerNames_, enabledExtensionCount_, ppEnabledExtensionNames_ ) - {} - - InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) - : layout::InstanceCreateInfo( rhs ) - {} - - InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - InstanceCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - InstanceCreateInfo & setFlags( vk::InstanceCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - InstanceCreateInfo & setPApplicationInfo( const vk::ApplicationInfo* pApplicationInfo_ ) - { - pApplicationInfo = pApplicationInfo_; - return *this; - } - - InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) - { - enabledLayerCount = enabledLayerCount_; - return *this; - } - - InstanceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) - { - ppEnabledLayerNames = ppEnabledLayerNames_; - return *this; - } - - InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) - { - enabledExtensionCount = enabledExtensionCount_; - return *this; - } - - InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) - { - ppEnabledExtensionNames = ppEnabledExtensionNames_; - return *this; - } - - operator VkInstanceCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkInstanceCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( InstanceCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pApplicationInfo == rhs.pApplicationInfo ) - && ( 
enabledLayerCount == rhs.enabledLayerCount ) - && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) - && ( enabledExtensionCount == rhs.enabledExtensionCount ) - && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ); - } - - bool operator!=( InstanceCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::InstanceCreateInfo::sType; - }; - static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct LayerProperties - { - operator VkLayerProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkLayerProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( LayerProperties const& rhs ) const - { - return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( specVersion == rhs.specVersion ) - && ( implementationVersion == rhs.implementationVersion ) - && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ); - } - - bool operator!=( LayerProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - char layerName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; - uint32_t implementationVersion; - char description[VK_MAX_DESCRIPTION_SIZE]; - }; - static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_MACOS_MVK - - namespace layout - { - struct MacOSSurfaceCreateInfoMVK - { - protected: - MacOSSurfaceCreateInfoMVK( vk::MacOSSurfaceCreateFlagsMVK flags_ = vk::MacOSSurfaceCreateFlagsMVK(), - const void* pView_ = nullptr ) - : flags( flags_ ) - , pView( pView_ ) - {} - - MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; - const void* pNext = nullptr; - vk::MacOSSurfaceCreateFlagsMVK flags; - const void* pView; - }; - static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "layout struct and wrapper have different size!" 
); - } - - struct MacOSSurfaceCreateInfoMVK : public layout::MacOSSurfaceCreateInfoMVK - { - MacOSSurfaceCreateInfoMVK( vk::MacOSSurfaceCreateFlagsMVK flags_ = vk::MacOSSurfaceCreateFlagsMVK(), - const void* pView_ = nullptr ) - : layout::MacOSSurfaceCreateInfoMVK( flags_, pView_ ) - {} - - MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) - : layout::MacOSSurfaceCreateInfoMVK( rhs ) - {} - - MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MacOSSurfaceCreateInfoMVK & setFlags( vk::MacOSSurfaceCreateFlagsMVK flags_ ) - { - flags = flags_; - return *this; - } - - MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) - { - pView = pView_; - return *this; - } - - operator VkMacOSSurfaceCreateInfoMVK const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMacOSSurfaceCreateInfoMVK &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pView == rhs.pView ); - } - - bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MacOSSurfaceCreateInfoMVK::sType; - }; - static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - namespace layout - { - struct MappedMemoryRange - { - protected: - MappedMemoryRange( vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::DeviceSize offset_ = 0, - vk::DeviceSize size_ = 0 ) - : memory( memory_ ) - , offset( offset_ ) - , size( size_ ) - {} - - MappedMemoryRange( VkMappedMemoryRange const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMappedMemoryRange; - const void* pNext = nullptr; - vk::DeviceMemory memory; - vk::DeviceSize offset; - vk::DeviceSize size; - }; - static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "layout struct and wrapper have different size!" 
);
-  }
-
-  struct MappedMemoryRange : public layout::MappedMemoryRange
-  {
-    MappedMemoryRange( vk::DeviceMemory memory_ = vk::DeviceMemory(),
-                       vk::DeviceSize offset_ = 0,
-                       vk::DeviceSize size_ = 0 )
-      : layout::MappedMemoryRange( memory_, offset_, size_ )
-    {}
-
-    MappedMemoryRange( VkMappedMemoryRange const & rhs )
-      : layout::MappedMemoryRange( rhs )
-    {}
-
-    MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
-    {
-      *reinterpret_cast<VkMappedMemoryRange*>(this) = rhs;
-      return *this;
-    }
-
-    MappedMemoryRange & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    MappedMemoryRange & setMemory( vk::DeviceMemory memory_ )
-    {
-      memory = memory_;
-      return *this;
-    }
-
-    MappedMemoryRange & setOffset( vk::DeviceSize offset_ )
-    {
-      offset = offset_;
-      return *this;
-    }
-
-    MappedMemoryRange & setSize( vk::DeviceSize size_ )
-    {
-      size = size_;
-      return *this;
-    }
-
-    operator VkMappedMemoryRange const&() const
-    {
-      return *reinterpret_cast<const VkMappedMemoryRange*>( this );
-    }
-
-    operator VkMappedMemoryRange &()
-    {
-      return *reinterpret_cast<VkMappedMemoryRange*>( this );
-    }
-
-    bool operator==( MappedMemoryRange const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memory == rhs.memory )
-          && ( offset == rhs.offset )
-          && ( size == rhs.size );
-    }
-
-    bool operator!=( MappedMemoryRange const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::MappedMemoryRange::sType;
-  };
-  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct MemoryAllocateFlagsInfo
-    {
-    protected:
-      MemoryAllocateFlagsInfo( vk::MemoryAllocateFlags flags_ = vk::MemoryAllocateFlags(),
-                               uint32_t deviceMask_ = 0 )
-        : flags( flags_ )
-        , deviceMask( deviceMask_ )
-      {}
-
-      MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs )
-      {
-        *reinterpret_cast<VkMemoryAllocateFlagsInfo*>(this) = rhs;
-      }
-
-      MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs )
-      {
-        *reinterpret_cast<VkMemoryAllocateFlagsInfo*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
-      const void* pNext = nullptr;
-      vk::MemoryAllocateFlags flags;
-      uint32_t deviceMask;
-    };
-    static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "layout struct and wrapper have different size!"
); - } - - struct MemoryAllocateFlagsInfo : public layout::MemoryAllocateFlagsInfo - { - MemoryAllocateFlagsInfo( vk::MemoryAllocateFlags flags_ = vk::MemoryAllocateFlags(), - uint32_t deviceMask_ = 0 ) - : layout::MemoryAllocateFlagsInfo( flags_, deviceMask_ ) - {} - - MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) - : layout::MemoryAllocateFlagsInfo( rhs ) - {} - - MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryAllocateFlagsInfo & setFlags( vk::MemoryAllocateFlags flags_ ) - { - flags = flags_; - return *this; - } - - MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) - { - deviceMask = deviceMask_; - return *this; - } - - operator VkMemoryAllocateFlagsInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryAllocateFlagsInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryAllocateFlagsInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( deviceMask == rhs.deviceMask ); - } - - bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryAllocateFlagsInfo::sType; - }; - static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryAllocateInfo - { - protected: - MemoryAllocateInfo( vk::DeviceSize allocationSize_ = 0, - uint32_t memoryTypeIndex_ = 0 ) - : allocationSize( allocationSize_ ) - , memoryTypeIndex( memoryTypeIndex_ ) - {} - - MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryAllocateInfo; - const void* pNext = nullptr; - vk::DeviceSize allocationSize; - uint32_t memoryTypeIndex; - }; - static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct MemoryAllocateInfo : public layout::MemoryAllocateInfo - { - MemoryAllocateInfo( vk::DeviceSize allocationSize_ = 0, - uint32_t memoryTypeIndex_ = 0 ) - : layout::MemoryAllocateInfo( allocationSize_, memoryTypeIndex_ ) - {} - - MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) - : layout::MemoryAllocateInfo( rhs ) - {} - - MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryAllocateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryAllocateInfo & setAllocationSize( vk::DeviceSize allocationSize_ ) - { - allocationSize = allocationSize_; - return *this; - } - - MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) - { - memoryTypeIndex = memoryTypeIndex_; - return *this; - } - - operator VkMemoryAllocateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryAllocateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryAllocateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( allocationSize == rhs.allocationSize ) - && ( memoryTypeIndex == rhs.memoryTypeIndex ); - } - - bool operator!=( MemoryAllocateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryAllocateInfo::sType; - }; - static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryBarrier - { - protected: - MemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags() ) - : srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - {} - - MemoryBarrier( VkMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryBarrier; - const void* pNext = nullptr; - vk::AccessFlags srcAccessMask; - vk::AccessFlags dstAccessMask; - }; - static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "layout struct and wrapper have different size!" 
); - } - - struct MemoryBarrier : public layout::MemoryBarrier - { - MemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags() ) - : layout::MemoryBarrier( srcAccessMask_, dstAccessMask_ ) - {} - - MemoryBarrier( VkMemoryBarrier const & rhs ) - : layout::MemoryBarrier( rhs ) - {} - - MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryBarrier & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryBarrier & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) - { - srcAccessMask = srcAccessMask_; - return *this; - } - - MemoryBarrier & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) - { - dstAccessMask = dstAccessMask_; - return *this; - } - - operator VkMemoryBarrier const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryBarrier &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryBarrier const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ); - } - - bool operator!=( MemoryBarrier const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryBarrier::sType; - }; - static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryDedicatedAllocateInfo - { - protected: - MemoryDedicatedAllocateInfo( vk::Image image_ = vk::Image(), - vk::Buffer buffer_ = vk::Buffer() ) - : image( image_ ) - , buffer( buffer_ ) - {} - - MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; - const void* pNext = nullptr; - vk::Image image; - vk::Buffer buffer; - }; - static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct MemoryDedicatedAllocateInfo : public layout::MemoryDedicatedAllocateInfo - { - MemoryDedicatedAllocateInfo( vk::Image image_ = vk::Image(), - vk::Buffer buffer_ = vk::Buffer() ) - : layout::MemoryDedicatedAllocateInfo( image_, buffer_ ) - {} - - MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) - : layout::MemoryDedicatedAllocateInfo( rhs ) - {} - - MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryDedicatedAllocateInfo & setImage( vk::Image image_ ) - { - image = image_; - return *this; - } - - MemoryDedicatedAllocateInfo & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - operator VkMemoryDedicatedAllocateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryDedicatedAllocateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( image == rhs.image ) - && ( buffer == rhs.buffer ); - } - - bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryDedicatedAllocateInfo::sType; - }; - static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryDedicatedRequirements - { - protected: - MemoryDedicatedRequirements( vk::Bool32 prefersDedicatedAllocation_ = 0, - vk::Bool32 requiresDedicatedAllocation_ = 0 ) - : prefersDedicatedAllocation( prefersDedicatedAllocation_ ) - , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) - {} - - MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryDedicatedRequirements; - void* pNext = nullptr; - vk::Bool32 prefersDedicatedAllocation; - vk::Bool32 requiresDedicatedAllocation; - }; - static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "layout struct and wrapper have different size!" ); - } - - struct MemoryDedicatedRequirements : public layout::MemoryDedicatedRequirements - { - operator VkMemoryDedicatedRequirements const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryDedicatedRequirements &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryDedicatedRequirements const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) - && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); - } - - bool operator!=( MemoryDedicatedRequirements const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryDedicatedRequirements::sType; - }; - static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" 
);
-  static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct MemoryFdPropertiesKHR
-    {
-    protected:
-      MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = 0 )
-        : memoryTypeBits( memoryTypeBits_ )
-      {}
-
-      MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs )
-      {
-        *reinterpret_cast<VkMemoryFdPropertiesKHR*>(this) = rhs;
-      }
-
-      MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs )
-      {
-        *reinterpret_cast<VkMemoryFdPropertiesKHR*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
-      void* pNext = nullptr;
-      uint32_t memoryTypeBits;
-    };
-    static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "layout struct and wrapper have different size!" );
-  }
-
-  struct MemoryFdPropertiesKHR : public layout::MemoryFdPropertiesKHR
-  {
-    operator VkMemoryFdPropertiesKHR const&() const
-    {
-      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
-    }
-
-    operator VkMemoryFdPropertiesKHR &()
-    {
-      return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
-    }
-
-    bool operator==( MemoryFdPropertiesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( memoryTypeBits == rhs.memoryTypeBits );
-    }
-
-    bool operator!=( MemoryFdPropertiesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::MemoryFdPropertiesKHR::sType;
-  };
-  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-
-  namespace layout
-  {
-    struct MemoryGetAndroidHardwareBufferInfoANDROID
-    {
-    protected:
-      MemoryGetAndroidHardwareBufferInfoANDROID( vk::DeviceMemory memory_ = vk::DeviceMemory() )
-        : memory( memory_ )
-      {}
-
-      MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
-      {
-        *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this) = rhs;
-      }
-
-      MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
-      {
-        *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
-      const void* pNext = nullptr;
-      vk::DeviceMemory memory;
-    };
-    static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "layout struct and wrapper have different size!"
); - } - - struct MemoryGetAndroidHardwareBufferInfoANDROID : public layout::MemoryGetAndroidHardwareBufferInfoANDROID - { - MemoryGetAndroidHardwareBufferInfoANDROID( vk::DeviceMemory memory_ = vk::DeviceMemory() ) - : layout::MemoryGetAndroidHardwareBufferInfoANDROID( memory_ ) - {} - - MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) - : layout::MemoryGetAndroidHardwareBufferInfoANDROID( rhs ) - {} - - MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ); - } - - bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryGetAndroidHardwareBufferInfoANDROID::sType; - }; - static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - namespace layout - { - struct MemoryGetFdInfoKHR - { - protected: - MemoryGetFdInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : memory( memory_ ) - , handleType( handleType_ ) - {} - - MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryGetFdInfoKHR; - const void* pNext = nullptr; - vk::DeviceMemory memory; - vk::ExternalMemoryHandleTypeFlagBits handleType; - }; - static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct MemoryGetFdInfoKHR : public layout::MemoryGetFdInfoKHR - { - MemoryGetFdInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : layout::MemoryGetFdInfoKHR( memory_, handleType_ ) - {} - - MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) - : layout::MemoryGetFdInfoKHR( rhs ) - {} - - MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryGetFdInfoKHR & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - MemoryGetFdInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkMemoryGetFdInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryGetFdInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryGetFdInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( MemoryGetFdInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryGetFdInfoKHR::sType; - }; - static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct MemoryGetWin32HandleInfoKHR - { - protected: - MemoryGetWin32HandleInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : memory( memory_ ) - , handleType( handleType_ ) - {} - - MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; - const void* pNext = nullptr; - vk::DeviceMemory memory; - vk::ExternalMemoryHandleTypeFlagBits handleType; - }; - static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct MemoryGetWin32HandleInfoKHR : public layout::MemoryGetWin32HandleInfoKHR - { - MemoryGetWin32HandleInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(), - vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : layout::MemoryGetWin32HandleInfoKHR( memory_, handleType_ ) - {} - - MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) - : layout::MemoryGetWin32HandleInfoKHR( rhs ) - {} - - MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryGetWin32HandleInfoKHR & setMemory( vk::DeviceMemory memory_ ) - { - memory = memory_; - return *this; - } - - MemoryGetWin32HandleInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkMemoryGetWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryGetWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memory == rhs.memory ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryGetWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct MemoryHeap - { - operator VkMemoryHeap const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryHeap &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryHeap const& rhs ) const - { - return ( size == rhs.size ) - && ( flags == rhs.flags ); - } - - bool operator!=( MemoryHeap const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DeviceSize size; - vk::MemoryHeapFlags flags; - }; - static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryHostPointerPropertiesEXT - { - protected: - MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = 0 ) - : memoryTypeBits( memoryTypeBits_ ) - {} - - MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; - void* pNext = nullptr; - uint32_t memoryTypeBits; - }; - static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct MemoryHostPointerPropertiesEXT : public layout::MemoryHostPointerPropertiesEXT - { - operator VkMemoryHostPointerPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryHostPointerPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryHostPointerPropertiesEXT::sType; - }; - static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryPriorityAllocateInfoEXT - { - protected: - MemoryPriorityAllocateInfoEXT( float priority_ = 0 ) - : priority( priority_ ) - {} - - MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; - const void* pNext = nullptr; - float priority; - }; - static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct MemoryPriorityAllocateInfoEXT : public layout::MemoryPriorityAllocateInfoEXT - { - MemoryPriorityAllocateInfoEXT( float priority_ = 0 ) - : layout::MemoryPriorityAllocateInfoEXT( priority_ ) - {} - - MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) - : layout::MemoryPriorityAllocateInfoEXT( rhs ) - {} - - MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) - { - priority = priority_; - return *this; - } - - operator VkMemoryPriorityAllocateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryPriorityAllocateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( priority == rhs.priority ); - } - - bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryPriorityAllocateInfoEXT::sType; - }; - static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct MemoryRequirements - { - operator VkMemoryRequirements const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryRequirements &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryRequirements const& rhs ) const - { - return ( size == rhs.size ) - && ( alignment == rhs.alignment ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryRequirements const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::DeviceSize size; - vk::DeviceSize alignment; - uint32_t memoryTypeBits; - }; - static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct MemoryRequirements2 - { - protected: - MemoryRequirements2( vk::MemoryRequirements memoryRequirements_ = vk::MemoryRequirements() ) - : memoryRequirements( memoryRequirements_ ) - {} - - MemoryRequirements2( VkMemoryRequirements2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryRequirements2; - void* pNext = nullptr; - vk::MemoryRequirements memoryRequirements; - }; - static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "layout struct and wrapper have different size!" ); - } - - struct MemoryRequirements2 : public layout::MemoryRequirements2 - { - operator VkMemoryRequirements2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryRequirements2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryRequirements2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); - } - - bool operator!=( MemoryRequirements2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryRequirements2::sType; - }; - static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct MemoryType - { - operator VkMemoryType const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryType &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryType const& rhs ) const - { - return ( propertyFlags == rhs.propertyFlags ) - && ( heapIndex == rhs.heapIndex ); - } - - bool operator!=( MemoryType const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::MemoryPropertyFlags propertyFlags; - uint32_t heapIndex; - }; - static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct MemoryWin32HandlePropertiesKHR - { - protected: - MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = 0 ) - : memoryTypeBits( memoryTypeBits_ ) - {} - - MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; - void* pNext = nullptr; - uint32_t memoryTypeBits; - }; - static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "layout struct and wrapper have different size!" ); - } - - struct MemoryWin32HandlePropertiesKHR : public layout::MemoryWin32HandlePropertiesKHR - { - operator VkMemoryWin32HandlePropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMemoryWin32HandlePropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryTypeBits == rhs.memoryTypeBits ); - } - - bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MemoryWin32HandlePropertiesKHR::sType; - }; - static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_METAL_EXT - - namespace layout - { - struct MetalSurfaceCreateInfoEXT - { - protected: - MetalSurfaceCreateInfoEXT( vk::MetalSurfaceCreateFlagsEXT flags_ = vk::MetalSurfaceCreateFlagsEXT(), - const CAMetalLayer* pLayer_ = nullptr ) - : flags( flags_ ) - , pLayer( pLayer_ ) - {} - - MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; - const void* pNext = nullptr; - vk::MetalSurfaceCreateFlagsEXT flags; - const CAMetalLayer* pLayer; - }; - static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct MetalSurfaceCreateInfoEXT : public layout::MetalSurfaceCreateInfoEXT - { - MetalSurfaceCreateInfoEXT( vk::MetalSurfaceCreateFlagsEXT flags_ = vk::MetalSurfaceCreateFlagsEXT(), - const CAMetalLayer* pLayer_ = nullptr ) - : layout::MetalSurfaceCreateInfoEXT( flags_, pLayer_ ) - {} - - MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) - : layout::MetalSurfaceCreateInfoEXT( rhs ) - {} - - MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - MetalSurfaceCreateInfoEXT & setFlags( vk::MetalSurfaceCreateFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) - { - pLayer = pLayer_; - return *this; - } - - operator VkMetalSurfaceCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMetalSurfaceCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pLayer == rhs.pLayer ); - } - - bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MetalSurfaceCreateInfoEXT::sType; - }; - static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - namespace layout - { - struct MultisamplePropertiesEXT - { - protected: - MultisamplePropertiesEXT( vk::Extent2D maxSampleLocationGridSize_ = vk::Extent2D() ) - : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) - {} - - MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eMultisamplePropertiesEXT; - void* pNext = nullptr; - vk::Extent2D maxSampleLocationGridSize; - }; - static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct MultisamplePropertiesEXT : public layout::MultisamplePropertiesEXT - { - operator VkMultisamplePropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkMultisamplePropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( MultisamplePropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); - } - - bool operator!=( MultisamplePropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::MultisamplePropertiesEXT::sType; - }; - static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct ObjectTableCreateInfoNVX - { - protected: - ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, - const vk::ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, - const uint32_t* pObjectEntryCounts_ = nullptr, - const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, - uint32_t maxUniformBuffersPerDescriptor_ = 0, - uint32_t maxStorageBuffersPerDescriptor_ = 0, - uint32_t maxStorageImagesPerDescriptor_ = 0, - uint32_t maxSampledImagesPerDescriptor_ = 0, - uint32_t maxPipelineLayouts_ = 0 ) - : objectCount( objectCount_ ) - , pObjectEntryTypes( pObjectEntryTypes_ ) - , pObjectEntryCounts( pObjectEntryCounts_ ) - , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ ) - , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ ) - , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ ) - , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ ) - , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ ) - , maxPipelineLayouts( maxPipelineLayouts_ ) - {} - - ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eObjectTableCreateInfoNVX; - const void* pNext = nullptr; - uint32_t objectCount; - const vk::ObjectEntryTypeNVX* pObjectEntryTypes; - const uint32_t* pObjectEntryCounts; - const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; - }; - static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "layout struct and wrapper have different size!" 
); - } - - struct ObjectTableCreateInfoNVX : public layout::ObjectTableCreateInfoNVX - { - ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, - const vk::ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, - const uint32_t* pObjectEntryCounts_ = nullptr, - const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, - uint32_t maxUniformBuffersPerDescriptor_ = 0, - uint32_t maxStorageBuffersPerDescriptor_ = 0, - uint32_t maxStorageImagesPerDescriptor_ = 0, - uint32_t maxSampledImagesPerDescriptor_ = 0, - uint32_t maxPipelineLayouts_ = 0 ) - : layout::ObjectTableCreateInfoNVX( objectCount_, pObjectEntryTypes_, pObjectEntryCounts_, pObjectEntryUsageFlags_, maxUniformBuffersPerDescriptor_, maxStorageBuffersPerDescriptor_, maxStorageImagesPerDescriptor_, maxSampledImagesPerDescriptor_, maxPipelineLayouts_ ) - {} - - ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) - : layout::ObjectTableCreateInfoNVX( rhs ) - {} - - ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) - { - objectCount = objectCount_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryTypes( const vk::ObjectEntryTypeNVX* pObjectEntryTypes_ ) - { - pObjectEntryTypes = pObjectEntryTypes_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) - { - pObjectEntryCounts = pObjectEntryCounts_; - return *this; - } - - ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) - { - pObjectEntryUsageFlags = pObjectEntryUsageFlags_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) - { - maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) - { - maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) - { - maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) - { - maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_; - return *this; - } - - ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) - { - maxPipelineLayouts = maxPipelineLayouts_; - return *this; - } - - operator VkObjectTableCreateInfoNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableCreateInfoNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableCreateInfoNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( objectCount == rhs.objectCount ) - && ( pObjectEntryTypes == rhs.pObjectEntryTypes ) - && ( pObjectEntryCounts == rhs.pObjectEntryCounts ) - && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags ) - && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor ) - && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor ) - && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor ) - && 
( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor ) - && ( maxPipelineLayouts == rhs.maxPipelineLayouts ); - } - - bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ObjectTableCreateInfoNVX::sType; - }; - static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ObjectTableEntryNVX - { - ObjectTableEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet, - vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX() ) - : type( type_ ) - , flags( flags_ ) - {} - - ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ObjectTableEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) - { - type = type_; - return *this; - } - - ObjectTableEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) - { - flags = flags_; - return *this; - } - - operator VkObjectTableEntryNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableEntryNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableEntryNVX const& rhs ) const - { - return ( type == rhs.type ) - && ( flags == rhs.flags ); - } - - bool operator!=( ObjectTableEntryNVX const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ObjectEntryTypeNVX type; - vk::ObjectEntryUsageFlagsNVX flags; - }; - static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableDescriptorSetEntryNVX - { - ObjectTableDescriptorSetEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet, - vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(), - vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(), - vk::DescriptorSet descriptorSet_ = vk::DescriptorSet() ) - : type( type_ ) - , flags( flags_ ) - , pipelineLayout( pipelineLayout_ ) - , descriptorSet( descriptorSet_ ) - {} - - explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(), - vk::DescriptorSet descriptorSet_ = vk::DescriptorSet() ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , pipelineLayout( pipelineLayout_ ) - , descriptorSet( descriptorSet_ ) - - {} - - ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) - { - type = type_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) - { - flags = flags_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setPipelineLayout( vk::PipelineLayout pipelineLayout_ ) - { - pipelineLayout = pipelineLayout_; - return *this; - } - - ObjectTableDescriptorSetEntryNVX & setDescriptorSet( vk::DescriptorSet descriptorSet_ ) - { - descriptorSet = descriptorSet_; - return *this; - } - - operator VkObjectTableDescriptorSetEntryNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableDescriptorSetEntryNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( pipelineLayout == rhs.pipelineLayout ) - && ( descriptorSet == rhs.descriptorSet ); - } - - bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ObjectEntryTypeNVX type; - vk::ObjectEntryUsageFlagsNVX flags; - vk::PipelineLayout pipelineLayout; - vk::DescriptorSet descriptorSet; - }; - static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ObjectTableIndexBufferEntryNVX - { - ObjectTableIndexBufferEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet, - vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(), - vk::Buffer buffer_ = vk::Buffer(), - vk::IndexType indexType_ = vk::IndexType::eUint16 ) - : type( type_ ) - , flags( flags_ ) - , buffer( buffer_ ) - , indexType( indexType_ ) - {} - - explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, - vk::Buffer buffer_ = vk::Buffer(), - vk::IndexType indexType_ = vk::IndexType::eUint16 ) - : type( objectTableEntryNVX.type ) - , flags( objectTableEntryNVX.flags ) - , buffer( buffer_ ) - , indexType( indexType_ ) - - {} - - ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) - { - type = type_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) - { - flags = flags_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setBuffer( vk::Buffer buffer_ ) - { - buffer = buffer_; - return *this; - } - - ObjectTableIndexBufferEntryNVX & setIndexType( vk::IndexType indexType_ ) - { - indexType = indexType_; - return *this; - } - - operator VkObjectTableIndexBufferEntryNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkObjectTableIndexBufferEntryNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const - { - return ( type == rhs.type ) - && ( flags == rhs.flags ) - && ( buffer == rhs.buffer ) - && ( indexType == rhs.indexType ); - } - - bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ObjectEntryTypeNVX type; - vk::ObjectEntryUsageFlagsNVX flags; - vk::Buffer buffer; - vk::IndexType indexType; - }; - static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  struct ObjectTablePipelineEntryNVX
-  {
-    ObjectTablePipelineEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
-                                 vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
-                                 vk::Pipeline pipeline_ = vk::Pipeline() )
-      : type( type_ )
-      , flags( flags_ )
-      , pipeline( pipeline_ )
-    {}
-
-    explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                          vk::Pipeline pipeline_ = vk::Pipeline() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , pipeline( pipeline_ )
-
-    {}
-
-    ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
-    {
-      *reinterpret_cast<VkObjectTablePipelineEntryNVX*>(this) = rhs;
-    }
-
-    ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
-    {
-      *reinterpret_cast<VkObjectTablePipelineEntryNVX*>(this) = rhs;
-      return *this;
-    }
-
-    ObjectTablePipelineEntryNVX & setType( vk::ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTablePipelineEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ObjectTablePipelineEntryNVX & setPipeline( vk::Pipeline pipeline_ )
-    {
-      pipeline = pipeline_;
-      return *this;
-    }
-
-    operator VkObjectTablePipelineEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>( this );
-    }
-
-    operator VkObjectTablePipelineEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTablePipelineEntryNVX*>( this );
-    }
-
-    bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( pipeline == rhs.pipeline );
-    }
-
-    bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::ObjectEntryTypeNVX type;
-    vk::ObjectEntryUsageFlagsNVX flags;
-    vk::Pipeline pipeline;
-  };
-  static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ObjectTablePipelineEntryNVX>::value, "struct wrapper is not a standard layout!" );
-
-  struct ObjectTablePushConstantEntryNVX
-  {
-    ObjectTablePushConstantEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
-                                     vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
-                                     vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
-                                     vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags() )
-      : type( type_ )
-      , flags( flags_ )
-      , pipelineLayout( pipelineLayout_ )
-      , stageFlags( stageFlags_ )
-    {}
-
-    explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                              vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
-                                              vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , pipelineLayout( pipelineLayout_ )
-      , stageFlags( stageFlags_ )
-
-    {}
-
-    ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
-    {
-      *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>(this) = rhs;
-    }
-
-    ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
-    {
-      *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>(this) = rhs;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX & setType( vk::ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX & setPipelineLayout( vk::PipelineLayout pipelineLayout_ )
-    {
-      pipelineLayout = pipelineLayout_;
-      return *this;
-    }
-
-    ObjectTablePushConstantEntryNVX & setStageFlags( vk::ShaderStageFlags stageFlags_ )
-    {
-      stageFlags = stageFlags_;
-      return *this;
-    }
-
-    operator VkObjectTablePushConstantEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>( this );
-    }
-
-    operator VkObjectTablePushConstantEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>( this );
-    }
-
-    bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( pipelineLayout == rhs.pipelineLayout )
-          && ( stageFlags == rhs.stageFlags );
-    }
-
-    bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::ObjectEntryTypeNVX type;
-    vk::ObjectEntryUsageFlagsNVX flags;
-    vk::PipelineLayout pipelineLayout;
-    vk::ShaderStageFlags stageFlags;
-  };
-  static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ObjectTablePushConstantEntryNVX>::value, "struct wrapper is not a standard layout!" );
-
-  struct ObjectTableVertexBufferEntryNVX
-  {
-    ObjectTableVertexBufferEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
-                                     vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
-                                     vk::Buffer buffer_ = vk::Buffer() )
-      : type( type_ )
-      , flags( flags_ )
-      , buffer( buffer_ )
-    {}
-
-    explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
-                                              vk::Buffer buffer_ = vk::Buffer() )
-      : type( objectTableEntryNVX.type )
-      , flags( objectTableEntryNVX.flags )
-      , buffer( buffer_ )
-
-    {}
-
-    ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
-    {
-      *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>(this) = rhs;
-    }
-
-    ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
-    {
-      *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>(this) = rhs;
-      return *this;
-    }
-
-    ObjectTableVertexBufferEntryNVX & setType( vk::ObjectEntryTypeNVX type_ )
-    {
-      type = type_;
-      return *this;
-    }
-
-    ObjectTableVertexBufferEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ )
-    {
-      flags = flags_;
-      return *this;
-    }
-
-    ObjectTableVertexBufferEntryNVX & setBuffer( vk::Buffer buffer_ )
-    {
-      buffer = buffer_;
-      return *this;
-    }
-
-    operator VkObjectTableVertexBufferEntryNVX const&() const
-    {
-      return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>( this );
-    }
-
-    operator VkObjectTableVertexBufferEntryNVX &()
-    {
-      return *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>( this );
-    }
-
-    bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
-    {
-      return ( type == rhs.type )
-          && ( flags == rhs.flags )
-          && ( buffer == rhs.buffer );
-    }
-
-    bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    vk::ObjectEntryTypeNVX type;
-    vk::ObjectEntryUsageFlagsNVX flags;
-    vk::Buffer buffer;
-  };
-  static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ObjectTableVertexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
-
-  struct PastPresentationTimingGOOGLE
-  {
-    operator VkPastPresentationTimingGOOGLE const&() const
-    {
-      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
-    }
-
-    operator VkPastPresentationTimingGOOGLE &()
-    {
-      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
-    }
-
-    bool operator==( PastPresentationTimingGOOGLE const& rhs ) const
-    {
-      return ( presentID == rhs.presentID )
-          && ( desiredPresentTime == rhs.desiredPresentTime )
-          && ( actualPresentTime == rhs.actualPresentTime )
-          && ( earliestPresentTime == rhs.earliestPresentTime )
-          && ( presentMargin == rhs.presentMargin );
-    }
-
-    bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  public:
-    uint32_t presentID;
-    uint64_t desiredPresentTime;
-    uint64_t actualPresentTime;
-    uint64_t earliestPresentTime;
-    uint64_t presentMargin;
-  };
-  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct PerformanceConfigurationAcquireInfoINTEL - { - protected: - PerformanceConfigurationAcquireInfoINTEL( vk::PerformanceConfigurationTypeINTEL type_ = vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) - : type( type_ ) - {} - - PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; - const void* pNext = nullptr; - vk::PerformanceConfigurationTypeINTEL type; - }; - static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "layout struct and wrapper have different size!" ); - } - - struct PerformanceConfigurationAcquireInfoINTEL : public layout::PerformanceConfigurationAcquireInfoINTEL - { - PerformanceConfigurationAcquireInfoINTEL( vk::PerformanceConfigurationTypeINTEL type_ = vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) - : layout::PerformanceConfigurationAcquireInfoINTEL( type_ ) - {} - - PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) - : layout::PerformanceConfigurationAcquireInfoINTEL( rhs ) - {} - - PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PerformanceConfigurationAcquireInfoINTEL & setType( vk::PerformanceConfigurationTypeINTEL type_ ) - { - type = type_; - return *this; - } - - operator VkPerformanceConfigurationAcquireInfoINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceConfigurationAcquireInfoINTEL &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ); - } - - bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PerformanceConfigurationAcquireInfoINTEL::sType; - }; - static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PerformanceMarkerInfoINTEL - { - protected: - PerformanceMarkerInfoINTEL( uint64_t marker_ = 0 ) - : marker( marker_ ) - {} - - PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; - const void* pNext = nullptr; - uint64_t marker; - }; - static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "layout struct and wrapper have different size!" 
); - } - - struct PerformanceMarkerInfoINTEL : public layout::PerformanceMarkerInfoINTEL - { - PerformanceMarkerInfoINTEL( uint64_t marker_ = 0 ) - : layout::PerformanceMarkerInfoINTEL( marker_ ) - {} - - PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) - : layout::PerformanceMarkerInfoINTEL( rhs ) - {} - - PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) - { - marker = marker_; - return *this; - } - - operator VkPerformanceMarkerInfoINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceMarkerInfoINTEL &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); - } - - bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PerformanceMarkerInfoINTEL::sType; - }; - static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PerformanceOverrideInfoINTEL - { - protected: - PerformanceOverrideInfoINTEL( vk::PerformanceOverrideTypeINTEL type_ = vk::PerformanceOverrideTypeINTEL::eNullHardware, - vk::Bool32 enable_ = 0, - uint64_t parameter_ = 0 ) - : type( type_ ) - , enable( enable_ ) - , parameter( parameter_ ) - {} - - PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; - const void* pNext = nullptr; - vk::PerformanceOverrideTypeINTEL type; - vk::Bool32 enable; - uint64_t parameter; - }; - static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "layout struct and wrapper have different size!" 
); - } - - struct PerformanceOverrideInfoINTEL : public layout::PerformanceOverrideInfoINTEL - { - PerformanceOverrideInfoINTEL( vk::PerformanceOverrideTypeINTEL type_ = vk::PerformanceOverrideTypeINTEL::eNullHardware, - vk::Bool32 enable_ = 0, - uint64_t parameter_ = 0 ) - : layout::PerformanceOverrideInfoINTEL( type_, enable_, parameter_ ) - {} - - PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) - : layout::PerformanceOverrideInfoINTEL( rhs ) - {} - - PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PerformanceOverrideInfoINTEL & setType( vk::PerformanceOverrideTypeINTEL type_ ) - { - type = type_; - return *this; - } - - PerformanceOverrideInfoINTEL & setEnable( vk::Bool32 enable_ ) - { - enable = enable_; - return *this; - } - - PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) - { - parameter = parameter_; - return *this; - } - - operator VkPerformanceOverrideInfoINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceOverrideInfoINTEL &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( enable == rhs.enable ) - && ( parameter == rhs.parameter ); - } - - bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PerformanceOverrideInfoINTEL::sType; - }; - static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PerformanceStreamMarkerInfoINTEL - { - protected: - PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = 0 ) - : marker( marker_ ) - {} - - PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; - const void* pNext = nullptr; - uint32_t marker; - }; - static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "layout struct and wrapper have different size!" 
); - } - - struct PerformanceStreamMarkerInfoINTEL : public layout::PerformanceStreamMarkerInfoINTEL - { - PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = 0 ) - : layout::PerformanceStreamMarkerInfoINTEL( marker_ ) - {} - - PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) - : layout::PerformanceStreamMarkerInfoINTEL( rhs ) - {} - - PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) - { - marker = marker_; - return *this; - } - - operator VkPerformanceStreamMarkerInfoINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceStreamMarkerInfoINTEL &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( marker == rhs.marker ); - } - - bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PerformanceStreamMarkerInfoINTEL::sType; - }; - static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - union PerformanceValueDataINTEL - { - PerformanceValueDataINTEL( uint32_t value32_ = 0 ) - { - value32 = value32_; - } - - PerformanceValueDataINTEL( uint64_t value64_ ) - { - value64 = value64_; - } - - PerformanceValueDataINTEL( float valueFloat_ ) - { - valueFloat = valueFloat_; - } - - PerformanceValueDataINTEL( const char* valueString_ ) - { - valueString = valueString_; - } - - PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) - { - value32 = value32_; - return *this; - } - - PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) - { - value64 = value64_; - return *this; - } - - PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) - { - valueFloat = valueFloat_; - return *this; - } - - PerformanceValueDataINTEL & setValueBool( vk::Bool32 valueBool_ ) - { - valueBool = valueBool_; - return *this; - } - - PerformanceValueDataINTEL & setValueString( const char* valueString_ ) - { - valueString = valueString_; - return *this; - } - operator VkPerformanceValueDataINTEL const&() const - { - return *reinterpret_cast(this); - } - - operator VkPerformanceValueDataINTEL &() - { - return *reinterpret_cast(this); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - uint32_t value32; - uint64_t value64; - float valueFloat; - vk::Bool32 valueBool; - const char* valueString; -#else - uint32_t value32; - uint64_t value64; - float valueFloat; - VkBool32 valueBool; - const char* valueString; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct PerformanceValueINTEL - { - PerformanceValueINTEL( vk::PerformanceValueTypeINTEL type_ = vk::PerformanceValueTypeINTEL::eUint32, - vk::PerformanceValueDataINTEL data_ = vk::PerformanceValueDataINTEL() ) - : type( type_ ) - , data( data_ ) - {} - - PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - 
PerformanceValueINTEL & setType( vk::PerformanceValueTypeINTEL type_ ) - { - type = type_; - return *this; - } - - PerformanceValueINTEL & setData( vk::PerformanceValueDataINTEL data_ ) - { - data = data_; - return *this; - } - - operator VkPerformanceValueINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceValueINTEL &() - { - return *reinterpret_cast( this ); - } - - public: - vk::PerformanceValueTypeINTEL type; - vk::PerformanceValueDataINTEL data; - }; - static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDevice16BitStorageFeatures - { - protected: - PhysicalDevice16BitStorageFeatures( vk::Bool32 storageBuffer16BitAccess_ = 0, - vk::Bool32 uniformAndStorageBuffer16BitAccess_ = 0, - vk::Bool32 storagePushConstant16_ = 0, - vk::Bool32 storageInputOutput16_ = 0 ) - : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) - {} - - PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; - void* pNext = nullptr; - vk::Bool32 storageBuffer16BitAccess; - vk::Bool32 uniformAndStorageBuffer16BitAccess; - vk::Bool32 storagePushConstant16; - vk::Bool32 storageInputOutput16; - }; - static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDevice16BitStorageFeatures : public layout::PhysicalDevice16BitStorageFeatures - { - PhysicalDevice16BitStorageFeatures( vk::Bool32 storageBuffer16BitAccess_ = 0, - vk::Bool32 uniformAndStorageBuffer16BitAccess_ = 0, - vk::Bool32 storagePushConstant16_ = 0, - vk::Bool32 storageInputOutput16_ = 0 ) - : layout::PhysicalDevice16BitStorageFeatures( storageBuffer16BitAccess_, uniformAndStorageBuffer16BitAccess_, storagePushConstant16_, storageInputOutput16_ ) - {} - - PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) - : layout::PhysicalDevice16BitStorageFeatures( rhs ) - {} - - PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( vk::Bool32 storageBuffer16BitAccess_ ) - { - storageBuffer16BitAccess = storageBuffer16BitAccess_; - return *this; - } - - PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( vk::Bool32 uniformAndStorageBuffer16BitAccess_ ) - { - uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; - return *this; - } - - PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( vk::Bool32 storagePushConstant16_ ) - { - storagePushConstant16 = storagePushConstant16_; - return *this; - } - - PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( vk::Bool32 storageInputOutput16_ ) - { - storageInputOutput16 = storageInputOutput16_; - return *this; - } - - operator VkPhysicalDevice16BitStorageFeatures const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevice16BitStorageFeatures &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) - && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) - && ( storagePushConstant16 == rhs.storagePushConstant16 ) - && ( storageInputOutput16 == rhs.storageInputOutput16 ); - } - - bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDevice16BitStorageFeatures::sType; - }; - static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDevice8BitStorageFeaturesKHR - { - protected: - PhysicalDevice8BitStorageFeaturesKHR( vk::Bool32 storageBuffer8BitAccess_ = 0, - vk::Bool32 uniformAndStorageBuffer8BitAccess_ = 0, - vk::Bool32 storagePushConstant8_ = 0 ) - : storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) - {} - - PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR; - void* pNext = nullptr; - vk::Bool32 storageBuffer8BitAccess; - vk::Bool32 uniformAndStorageBuffer8BitAccess; - vk::Bool32 storagePushConstant8; - }; - static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDevice8BitStorageFeaturesKHR : public layout::PhysicalDevice8BitStorageFeaturesKHR - { - PhysicalDevice8BitStorageFeaturesKHR( vk::Bool32 storageBuffer8BitAccess_ = 0, - vk::Bool32 uniformAndStorageBuffer8BitAccess_ = 0, - vk::Bool32 storagePushConstant8_ = 0 ) - : layout::PhysicalDevice8BitStorageFeaturesKHR( storageBuffer8BitAccess_, uniformAndStorageBuffer8BitAccess_, storagePushConstant8_ ) - {} - - PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) - : layout::PhysicalDevice8BitStorageFeaturesKHR( rhs ) - {} - - PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDevice8BitStorageFeaturesKHR & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDevice8BitStorageFeaturesKHR & setStorageBuffer8BitAccess( vk::Bool32 storageBuffer8BitAccess_ ) - { - storageBuffer8BitAccess = storageBuffer8BitAccess_; - return *this; - } - - PhysicalDevice8BitStorageFeaturesKHR & setUniformAndStorageBuffer8BitAccess( vk::Bool32 uniformAndStorageBuffer8BitAccess_ ) - { - uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; - return *this; - } - - PhysicalDevice8BitStorageFeaturesKHR & setStoragePushConstant8( vk::Bool32 storagePushConstant8_ ) - { - storagePushConstant8 = storagePushConstant8_; - return *this; - } - - operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevice8BitStorageFeaturesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) - && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) - && ( storagePushConstant8 == rhs.storagePushConstant8 ); - } - - bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDevice8BitStorageFeaturesKHR::sType; - }; - static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceASTCDecodeFeaturesEXT - { - protected: - PhysicalDeviceASTCDecodeFeaturesEXT( vk::Bool32 decodeModeSharedExponent_ = 0 ) - : decodeModeSharedExponent( decodeModeSharedExponent_ ) - {} - - PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 decodeModeSharedExponent; - }; - static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceASTCDecodeFeaturesEXT : public layout::PhysicalDeviceASTCDecodeFeaturesEXT - { - PhysicalDeviceASTCDecodeFeaturesEXT( vk::Bool32 decodeModeSharedExponent_ = 0 ) - : layout::PhysicalDeviceASTCDecodeFeaturesEXT( decodeModeSharedExponent_ ) - {} - - PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) - : layout::PhysicalDeviceASTCDecodeFeaturesEXT( rhs ) - {} - - PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( vk::Bool32 decodeModeSharedExponent_ ) - { - decodeModeSharedExponent = decodeModeSharedExponent_; - return *this; - } - - operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); - } - - bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceASTCDecodeFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT - { - protected: - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( vk::Bool32 advancedBlendCoherentOperations_ = 0 ) - : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) - {} - - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 advancedBlendCoherentOperations; - }; - static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT : public layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT - { - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( vk::Bool32 advancedBlendCoherentOperations_ = 0 ) - : layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT( advancedBlendCoherentOperations_ ) - {} - - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) - : layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT( rhs ) - {} - - PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( vk::Bool32 advancedBlendCoherentOperations_ ) - { - advancedBlendCoherentOperations = advancedBlendCoherentOperations_; - return *this; - } - - operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); - } - - bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT - { - protected: - PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = 0, - vk::Bool32 advancedBlendIndependentBlend_ = 0, - vk::Bool32 advancedBlendNonPremultipliedSrcColor_ = 0, - vk::Bool32 advancedBlendNonPremultipliedDstColor_ = 0, - vk::Bool32 advancedBlendCorrelatedOverlap_ = 0, - vk::Bool32 advancedBlendAllOperations_ = 0 ) - : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) - , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) - , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) - , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ) - , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ) - , advancedBlendAllOperations( advancedBlendAllOperations_ ) - {} - - PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; - void* pNext = nullptr; - uint32_t advancedBlendMaxColorAttachments; - vk::Bool32 advancedBlendIndependentBlend; - vk::Bool32 advancedBlendNonPremultipliedSrcColor; - vk::Bool32 advancedBlendNonPremultipliedDstColor; - vk::Bool32 advancedBlendCorrelatedOverlap; - vk::Bool32 advancedBlendAllOperations; - }; - static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT : public layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT - { - operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) - && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) - && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) - && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) - && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) - && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); - } - - bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceBufferDeviceAddressFeaturesEXT - { - protected: - PhysicalDeviceBufferDeviceAddressFeaturesEXT( vk::Bool32 bufferDeviceAddress_ = 0, - vk::Bool32 bufferDeviceAddressCaptureReplay_ = 0, - vk::Bool32 bufferDeviceAddressMultiDevice_ = 0 ) - : bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) - {} - - PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 bufferDeviceAddress; - vk::Bool32 bufferDeviceAddressCaptureReplay; - vk::Bool32 bufferDeviceAddressMultiDevice; - }; - static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceBufferDeviceAddressFeaturesEXT : public layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT - { - PhysicalDeviceBufferDeviceAddressFeaturesEXT( vk::Bool32 bufferDeviceAddress_ = 0, - vk::Bool32 bufferDeviceAddressCaptureReplay_ = 0, - vk::Bool32 bufferDeviceAddressMultiDevice_ = 0 ) - : layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT( bufferDeviceAddress_, bufferDeviceAddressCaptureReplay_, bufferDeviceAddressMultiDevice_ ) - {} - - PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) - : layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT( rhs ) - {} - - PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( vk::Bool32 bufferDeviceAddress_ ) - { - bufferDeviceAddress = bufferDeviceAddress_; - return *this; - } - - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( vk::Bool32 bufferDeviceAddressCaptureReplay_ ) - { - bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; - return *this; - } - - PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( vk::Bool32 bufferDeviceAddressMultiDevice_ ) - { - bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; - return *this; - } - - operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) - && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) - && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); - } - - bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } 
- - private: - using layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceCoherentMemoryFeaturesAMD - { - protected: - PhysicalDeviceCoherentMemoryFeaturesAMD( vk::Bool32 deviceCoherentMemory_ = 0 ) - : deviceCoherentMemory( deviceCoherentMemory_ ) - {} - - PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; - void* pNext = nullptr; - vk::Bool32 deviceCoherentMemory; - }; - static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceCoherentMemoryFeaturesAMD : public layout::PhysicalDeviceCoherentMemoryFeaturesAMD - { - PhysicalDeviceCoherentMemoryFeaturesAMD( vk::Bool32 deviceCoherentMemory_ = 0 ) - : layout::PhysicalDeviceCoherentMemoryFeaturesAMD( deviceCoherentMemory_ ) - {} - - PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) - : layout::PhysicalDeviceCoherentMemoryFeaturesAMD( rhs ) - {} - - PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( vk::Bool32 deviceCoherentMemory_ ) - { - deviceCoherentMemory = deviceCoherentMemory_; - return *this; - } - - operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); - } - - bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceCoherentMemoryFeaturesAMD::sType; - }; - static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV - { - protected: - PhysicalDeviceComputeShaderDerivativesFeaturesNV( vk::Bool32 computeDerivativeGroupQuads_ = 0, - vk::Bool32 computeDerivativeGroupLinear_ = 0 ) - : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) - , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) - {} - - PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; - void* pNext = nullptr; - vk::Bool32 computeDerivativeGroupQuads; - vk::Bool32 computeDerivativeGroupLinear; - }; - static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV : public layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV - { - PhysicalDeviceComputeShaderDerivativesFeaturesNV( vk::Bool32 computeDerivativeGroupQuads_ = 0, - vk::Bool32 computeDerivativeGroupLinear_ = 0 ) - : layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV( computeDerivativeGroupQuads_, computeDerivativeGroupLinear_ ) - {} - - PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) - : layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV( rhs ) - {} - - PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( vk::Bool32 computeDerivativeGroupQuads_ ) - { - computeDerivativeGroupQuads = computeDerivativeGroupQuads_; - return *this; - } - - PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( vk::Bool32 computeDerivativeGroupLinear_ ) - { - computeDerivativeGroupLinear = computeDerivativeGroupLinear_; - return *this; - } - - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) - && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); - } - - bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceConditionalRenderingFeaturesEXT - { - protected: - PhysicalDeviceConditionalRenderingFeaturesEXT( vk::Bool32 conditionalRendering_ = 0, - vk::Bool32 inheritedConditionalRendering_ = 0 ) - : conditionalRendering( conditionalRendering_ ) - , inheritedConditionalRendering( inheritedConditionalRendering_ ) - {} - - PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 conditionalRendering; - vk::Bool32 inheritedConditionalRendering; - }; - static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceConditionalRenderingFeaturesEXT : public layout::PhysicalDeviceConditionalRenderingFeaturesEXT - { - PhysicalDeviceConditionalRenderingFeaturesEXT( vk::Bool32 conditionalRendering_ = 0, - vk::Bool32 inheritedConditionalRendering_ = 0 ) - : layout::PhysicalDeviceConditionalRenderingFeaturesEXT( conditionalRendering_, inheritedConditionalRendering_ ) - {} - - PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) - : layout::PhysicalDeviceConditionalRenderingFeaturesEXT( rhs ) - {} - - PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( vk::Bool32 conditionalRendering_ ) - { - conditionalRendering = conditionalRendering_; - return *this; - } - - PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( vk::Bool32 inheritedConditionalRendering_ ) - { - inheritedConditionalRendering = inheritedConditionalRendering_; - return *this; - } - - operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( conditionalRendering == rhs.conditionalRendering ) - && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); - } - - bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceConditionalRenderingFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceConservativeRasterizationPropertiesEXT - { - protected: - PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = 0, - float maxExtraPrimitiveOverestimationSize_ = 0, - float extraPrimitiveOverestimationSizeGranularity_ = 0, - vk::Bool32 primitiveUnderestimation_ = 0, - vk::Bool32 conservativePointAndLineRasterization_ = 0, - vk::Bool32 degenerateTrianglesRasterized_ = 0, - vk::Bool32 degenerateLinesRasterized_ = 0, - vk::Bool32 fullyCoveredFragmentShaderInputVariable_ = 0, - vk::Bool32 conservativeRasterizationPostDepthCoverage_ = 0 ) - : primitiveOverestimationSize( primitiveOverestimationSize_ ) - , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) - , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) - , primitiveUnderestimation( primitiveUnderestimation_ ) - , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) - , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) - , degenerateLinesRasterized( degenerateLinesRasterized_ ) - , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) - , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) - {} - - PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; - void* pNext = nullptr; - float primitiveOverestimationSize; - float maxExtraPrimitiveOverestimationSize; - float extraPrimitiveOverestimationSizeGranularity; - vk::Bool32 primitiveUnderestimation; - vk::Bool32 conservativePointAndLineRasterization; - vk::Bool32 degenerateTrianglesRasterized; - vk::Bool32 degenerateLinesRasterized; - vk::Bool32 fullyCoveredFragmentShaderInputVariable; - vk::Bool32 conservativeRasterizationPostDepthCoverage; - }; - static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceConservativeRasterizationPropertiesEXT : public layout::PhysicalDeviceConservativeRasterizationPropertiesEXT - { - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) - && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) - && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) - && ( primitiveUnderestimation == rhs.primitiveUnderestimation ) - && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) - && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) - && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) - && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) - && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); - } - - bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceConservativeRasterizationPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceCooperativeMatrixFeaturesNV - { - protected: - PhysicalDeviceCooperativeMatrixFeaturesNV( vk::Bool32 cooperativeMatrix_ = 0, - vk::Bool32 cooperativeMatrixRobustBufferAccess_ = 0 ) - : cooperativeMatrix( cooperativeMatrix_ ) - , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) - {} - - PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; - void* pNext = nullptr; - vk::Bool32 cooperativeMatrix; - vk::Bool32 cooperativeMatrixRobustBufferAccess; - }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceCooperativeMatrixFeaturesNV : public layout::PhysicalDeviceCooperativeMatrixFeaturesNV - { - PhysicalDeviceCooperativeMatrixFeaturesNV( vk::Bool32 cooperativeMatrix_ = 0, - vk::Bool32 cooperativeMatrixRobustBufferAccess_ = 0 ) - : layout::PhysicalDeviceCooperativeMatrixFeaturesNV( cooperativeMatrix_, cooperativeMatrixRobustBufferAccess_ ) - {} - - PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) - : layout::PhysicalDeviceCooperativeMatrixFeaturesNV( rhs ) - {} - - PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( vk::Bool32 cooperativeMatrix_ ) - { - cooperativeMatrix = cooperativeMatrix_; - return *this; - } - - PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( vk::Bool32 cooperativeMatrixRobustBufferAccess_ ) - { - cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; - return *this; - } - - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cooperativeMatrix == rhs.cooperativeMatrix ) - && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); - } - - bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceCooperativeMatrixFeaturesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceCooperativeMatrixPropertiesNV - { - protected: - PhysicalDeviceCooperativeMatrixPropertiesNV( vk::ShaderStageFlags cooperativeMatrixSupportedStages_ = vk::ShaderStageFlags() ) - : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) - {} - - PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; - void* pNext = nullptr; - vk::ShaderStageFlags cooperativeMatrixSupportedStages; - }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceCooperativeMatrixPropertiesNV : public layout::PhysicalDeviceCooperativeMatrixPropertiesNV - { - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); - } - - bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceCooperativeMatrixPropertiesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceCornerSampledImageFeaturesNV - { - protected: - PhysicalDeviceCornerSampledImageFeaturesNV( vk::Bool32 cornerSampledImage_ = 0 ) - : cornerSampledImage( cornerSampledImage_ ) - {} - - PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; - void* pNext = nullptr; - vk::Bool32 cornerSampledImage; - }; - static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceCornerSampledImageFeaturesNV : public layout::PhysicalDeviceCornerSampledImageFeaturesNV - { - PhysicalDeviceCornerSampledImageFeaturesNV( vk::Bool32 cornerSampledImage_ = 0 ) - : layout::PhysicalDeviceCornerSampledImageFeaturesNV( cornerSampledImage_ ) - {} - - PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) - : layout::PhysicalDeviceCornerSampledImageFeaturesNV( rhs ) - {} - - PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( vk::Bool32 cornerSampledImage_ ) - { - cornerSampledImage = cornerSampledImage_; - return *this; - } - - operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( cornerSampledImage == rhs.cornerSampledImage ); - } - - bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceCornerSampledImageFeaturesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceCoverageReductionModeFeaturesNV - { - protected: - PhysicalDeviceCoverageReductionModeFeaturesNV( vk::Bool32 coverageReductionMode_ = 0 ) - : coverageReductionMode( coverageReductionMode_ ) - {} - - PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; - void* pNext = nullptr; - vk::Bool32 coverageReductionMode; - }; - static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "layout struct and wrapper have different size!" 
);
-  }
-
-  struct PhysicalDeviceCoverageReductionModeFeaturesNV : public layout::PhysicalDeviceCoverageReductionModeFeaturesNV
-  {
-    PhysicalDeviceCoverageReductionModeFeaturesNV( vk::Bool32 coverageReductionMode_ = 0 )
-      : layout::PhysicalDeviceCoverageReductionModeFeaturesNV( coverageReductionMode_ )
-    {}
-
-    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs )
-      : layout::PhysicalDeviceCoverageReductionModeFeaturesNV( rhs )
-    {}
-
-    PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs )
-    {
-      *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>(this) = rhs;
-      return *this;
-    }
-
-    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( vk::Bool32 coverageReductionMode_ )
-    {
-      coverageReductionMode = coverageReductionMode_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
-    }
-
-    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( coverageReductionMode == rhs.coverageReductionMode );
-    }
-
-    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PhysicalDeviceCoverageReductionModeFeaturesNV::sType;
-  };
-  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
-    {
-    protected:
-      PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( vk::Bool32 dedicatedAllocationImageAliasing_ = 0 )
-        : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
-      {}
-
-      PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>(this) = rhs;
-      }
-
-      PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
-      void* pNext = nullptr;
-      vk::Bool32 dedicatedAllocationImageAliasing;
-    };
-    static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "layout struct and wrapper have different size!"
);
-  }
-
-  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : public layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
-  {
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( vk::Bool32 dedicatedAllocationImageAliasing_ = 0 )
-      : layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( dedicatedAllocationImageAliasing_ )
-    {}
-
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs )
-      : layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( rhs )
-    {}
-
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs )
-    {
-      *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>(this) = rhs;
-      return *this;
-    }
-
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( vk::Bool32 dedicatedAllocationImageAliasing_ )
-    {
-      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
-    }
-
-    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
-    }
-
-    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
-    }
-
-    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::sType;
-  };
-  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct PhysicalDeviceDepthClipEnableFeaturesEXT
-    {
-    protected:
-      PhysicalDeviceDepthClipEnableFeaturesEXT( vk::Bool32 depthClipEnable_ = 0 )
-        : depthClipEnable( depthClipEnable_ )
-      {}
-
-      PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>(this) = rhs;
-      }
-
-      PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
-      void* pNext = nullptr;
-      vk::Bool32 depthClipEnable;
-    };
-    static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "layout struct and wrapper have different size!"
); - } - - struct PhysicalDeviceDepthClipEnableFeaturesEXT : public layout::PhysicalDeviceDepthClipEnableFeaturesEXT - { - PhysicalDeviceDepthClipEnableFeaturesEXT( vk::Bool32 depthClipEnable_ = 0 ) - : layout::PhysicalDeviceDepthClipEnableFeaturesEXT( depthClipEnable_ ) - {} - - PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) - : layout::PhysicalDeviceDepthClipEnableFeaturesEXT( rhs ) - {} - - PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( vk::Bool32 depthClipEnable_ ) - { - depthClipEnable = depthClipEnable_; - return *this; - } - - operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( depthClipEnable == rhs.depthClipEnable ); - } - - bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceDepthClipEnableFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceDepthStencilResolvePropertiesKHR - { - protected: - PhysicalDeviceDepthStencilResolvePropertiesKHR( vk::ResolveModeFlagsKHR supportedDepthResolveModes_ = vk::ResolveModeFlagsKHR(), - vk::ResolveModeFlagsKHR supportedStencilResolveModes_ = vk::ResolveModeFlagsKHR(), - vk::Bool32 independentResolveNone_ = 0, - vk::Bool32 independentResolve_ = 0 ) - : supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) - {} - - PhysicalDeviceDepthStencilResolvePropertiesKHR( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceDepthStencilResolvePropertiesKHR& operator=( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR; - void* pNext = nullptr; - vk::ResolveModeFlagsKHR supportedDepthResolveModes; - vk::ResolveModeFlagsKHR supportedStencilResolveModes; - vk::Bool32 independentResolveNone; - vk::Bool32 independentResolve; - }; - static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceDepthStencilResolvePropertiesKHR : public layout::PhysicalDeviceDepthStencilResolvePropertiesKHR - { - operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) - && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) - && ( independentResolveNone == rhs.independentResolveNone ) - && ( independentResolve == rhs.independentResolve ); - } - - bool operator!=( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceDepthStencilResolvePropertiesKHR::sType; - }; - static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceDescriptorIndexingFeaturesEXT - { - protected: - PhysicalDeviceDescriptorIndexingFeaturesEXT( vk::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0, - vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0, - vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0, - vk::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0, - vk::Bool32 descriptorBindingPartiallyBound_ = 0, - vk::Bool32 descriptorBindingVariableDescriptorCount_ = 0, - vk::Bool32 runtimeDescriptorArray_ = 0 ) - : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , 
shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) - {} - - PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 shaderInputAttachmentArrayDynamicIndexing; - vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing; - vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing; - vk::Bool32 shaderUniformBufferArrayNonUniformIndexing; - vk::Bool32 shaderSampledImageArrayNonUniformIndexing; - vk::Bool32 shaderStorageBufferArrayNonUniformIndexing; - vk::Bool32 shaderStorageImageArrayNonUniformIndexing; - vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing; - vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing; - vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing; - vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind; - vk::Bool32 descriptorBindingSampledImageUpdateAfterBind; - vk::Bool32 descriptorBindingStorageImageUpdateAfterBind; - vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind; - vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind; - vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind; - vk::Bool32 descriptorBindingUpdateUnusedWhilePending; - vk::Bool32 descriptorBindingPartiallyBound; - vk::Bool32 descriptorBindingVariableDescriptorCount; - vk::Bool32 runtimeDescriptorArray; - }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceDescriptorIndexingFeaturesEXT : public layout::PhysicalDeviceDescriptorIndexingFeaturesEXT - { - PhysicalDeviceDescriptorIndexingFeaturesEXT( vk::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0, - vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0, - vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0, - vk::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0, - vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0, - vk::Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0, - vk::Bool32 descriptorBindingPartiallyBound_ = 0, - vk::Bool32 descriptorBindingVariableDescriptorCount_ = 0, - vk::Bool32 runtimeDescriptorArray_ = 0 ) - : layout::PhysicalDeviceDescriptorIndexingFeaturesEXT( shaderInputAttachmentArrayDynamicIndexing_, shaderUniformTexelBufferArrayDynamicIndexing_, shaderStorageTexelBufferArrayDynamicIndexing_, shaderUniformBufferArrayNonUniformIndexing_, shaderSampledImageArrayNonUniformIndexing_, shaderStorageBufferArrayNonUniformIndexing_, shaderStorageImageArrayNonUniformIndexing_, shaderInputAttachmentArrayNonUniformIndexing_, shaderUniformTexelBufferArrayNonUniformIndexing_, shaderStorageTexelBufferArrayNonUniformIndexing_, descriptorBindingUniformBufferUpdateAfterBind_, descriptorBindingSampledImageUpdateAfterBind_, descriptorBindingStorageImageUpdateAfterBind_, descriptorBindingStorageBufferUpdateAfterBind_, descriptorBindingUniformTexelBufferUpdateAfterBind_, descriptorBindingStorageTexelBufferUpdateAfterBind_, descriptorBindingUpdateUnusedWhilePending_, descriptorBindingPartiallyBound_, descriptorBindingVariableDescriptorCount_, runtimeDescriptorArray_ ) - {} - - PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) - : layout::PhysicalDeviceDescriptorIndexingFeaturesEXT( rhs ) - {} - - PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayDynamicIndexing( vk::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) - { - shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayDynamicIndexing( vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) - { - shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayDynamicIndexing( vk::Bool32 
shaderStorageTexelBufferArrayDynamicIndexing_ ) - { - shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformBufferArrayNonUniformIndexing( vk::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) - { - shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderSampledImageArrayNonUniformIndexing( vk::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) - { - shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageBufferArrayNonUniformIndexing( vk::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) - { - shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageImageArrayNonUniformIndexing( vk::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) - { - shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayNonUniformIndexing( vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) - { - shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayNonUniformIndexing( vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) - { - shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayNonUniformIndexing( vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) - { - shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformBufferUpdateAfterBind( vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) - { - descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingSampledImageUpdateAfterBind( vk::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) - { - descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageImageUpdateAfterBind( vk::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) - { - descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageBufferUpdateAfterBind( vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) - { - descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformTexelBufferUpdateAfterBind( vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - { - descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; - return *this; - } - - 
PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageTexelBufferUpdateAfterBind( vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - { - descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUpdateUnusedWhilePending( vk::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) - { - descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingPartiallyBound( vk::Bool32 descriptorBindingPartiallyBound_ ) - { - descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingVariableDescriptorCount( vk::Bool32 descriptorBindingVariableDescriptorCount_ ) - { - descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; - return *this; - } - - PhysicalDeviceDescriptorIndexingFeaturesEXT & setRuntimeDescriptorArray( vk::Bool32 runtimeDescriptorArray_ ) - { - runtimeDescriptorArray = runtimeDescriptorArray_; - return *this; - } - - operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) - && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) - && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) - && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) - && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) - && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) - && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) - && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) - && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) - && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) - && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) - && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) - && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) - && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) - && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) - && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) - && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) - && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) - && ( descriptorBindingVariableDescriptorCount 
== rhs.descriptorBindingVariableDescriptorCount ) - && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); - } - - bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceDescriptorIndexingFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceDescriptorIndexingPropertiesEXT - { - protected: - PhysicalDeviceDescriptorIndexingPropertiesEXT( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = 0, - vk::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = 0, - vk::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = 0, - vk::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = 0, - vk::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = 0, - vk::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = 0, - vk::Bool32 robustBufferAccessUpdateAfterBind_ = 0, - vk::Bool32 quadDivergentImplicitLod_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = 0, - uint32_t maxPerStageUpdateAfterBindResources_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = 0 ) - : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( 
maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) - {} - - PhysicalDeviceDescriptorIndexingPropertiesEXT( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceDescriptorIndexingPropertiesEXT& operator=( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT; - void* pNext = nullptr; - uint32_t maxUpdateAfterBindDescriptorsInAllPools; - vk::Bool32 shaderUniformBufferArrayNonUniformIndexingNative; - vk::Bool32 shaderSampledImageArrayNonUniformIndexingNative; - vk::Bool32 shaderStorageBufferArrayNonUniformIndexingNative; - vk::Bool32 shaderStorageImageArrayNonUniformIndexingNative; - vk::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative; - vk::Bool32 robustBufferAccessUpdateAfterBind; - vk::Bool32 quadDivergentImplicitLod; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; - uint32_t maxPerStageUpdateAfterBindResources; - uint32_t maxDescriptorSetUpdateAfterBindSamplers; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; - }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceDescriptorIndexingPropertiesEXT : public layout::PhysicalDeviceDescriptorIndexingPropertiesEXT - { - operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) - && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) - && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) - && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) - && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) - && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) - && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) - && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) - && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) - && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) - && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) - && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) - && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) - && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) - && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) - && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) - && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) - && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) - && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) - && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); - } - - bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceDescriptorIndexingPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceDiscardRectanglePropertiesEXT - { - protected: - PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 ) - : maxDiscardRectangles( maxDiscardRectangles_ ) - {} - - PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; - void* pNext = nullptr; - uint32_t maxDiscardRectangles; - }; - static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceDiscardRectanglePropertiesEXT : public layout::PhysicalDeviceDiscardRectanglePropertiesEXT - { - operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); - } - - bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceDiscardRectanglePropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceDriverPropertiesKHR - { - protected: - PhysicalDeviceDriverPropertiesKHR( vk::DriverIdKHR driverID_ = vk::DriverIdKHR::eAmdProprietary, - std::array const& driverName_ = { { 0 } }, - std::array const& driverInfo_ = { { 0 } }, - vk::ConformanceVersionKHR conformanceVersion_ = vk::ConformanceVersionKHR() ) - : driverID( driverID_ ) - , conformanceVersion( conformanceVersion_ ) - { - memcpy( &driverName, driverName_.data(), VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ); - memcpy( &driverInfo, driverInfo_.data(), VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ); - - } - - PhysicalDeviceDriverPropertiesKHR( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceDriverPropertiesKHR& operator=( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR; - void* pNext = nullptr; - vk::DriverIdKHR driverID; - char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR]; - char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR]; - vk::ConformanceVersionKHR conformanceVersion; - }; - static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceDriverPropertiesKHR : public layout::PhysicalDeviceDriverPropertiesKHR - { - operator VkPhysicalDeviceDriverPropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDriverPropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( driverID == rhs.driverID ) - && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 ) - && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 ) - && ( conformanceVersion == rhs.conformanceVersion ); - } - - bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceDriverPropertiesKHR::sType; - }; - static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceExclusiveScissorFeaturesNV - { - protected: - PhysicalDeviceExclusiveScissorFeaturesNV( vk::Bool32 exclusiveScissor_ = 0 ) - : exclusiveScissor( exclusiveScissor_ ) - {} - - PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; - void* pNext = nullptr; - vk::Bool32 exclusiveScissor; - }; - static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceExclusiveScissorFeaturesNV : public layout::PhysicalDeviceExclusiveScissorFeaturesNV - { - PhysicalDeviceExclusiveScissorFeaturesNV( vk::Bool32 exclusiveScissor_ = 0 ) - : layout::PhysicalDeviceExclusiveScissorFeaturesNV( exclusiveScissor_ ) - {} - - PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) - : layout::PhysicalDeviceExclusiveScissorFeaturesNV( rhs ) - {} - - PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( vk::Bool32 exclusiveScissor_ ) - { - exclusiveScissor = exclusiveScissor_; - return *this; - } - - operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( exclusiveScissor == rhs.exclusiveScissor ); - } - - bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceExclusiveScissorFeaturesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceExternalBufferInfo - { - protected: - PhysicalDeviceExternalBufferInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(), - vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(), - vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : flags( flags_ ) - , usage( usage_ ) - , handleType( handleType_ ) - {} - - PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; - const void* pNext = nullptr; - vk::BufferCreateFlags flags; - vk::BufferUsageFlags usage; - vk::ExternalMemoryHandleTypeFlagBits handleType; - }; - static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceExternalBufferInfo : public layout::PhysicalDeviceExternalBufferInfo - { - PhysicalDeviceExternalBufferInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(), - vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(), - vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : layout::PhysicalDeviceExternalBufferInfo( flags_, usage_, handleType_ ) - {} - - PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) - : layout::PhysicalDeviceExternalBufferInfo( rhs ) - {} - - PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalBufferInfo & setFlags( vk::BufferCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - PhysicalDeviceExternalBufferInfo & setUsage( vk::BufferUsageFlags usage_ ) - { - usage = usage_; - return *this; - } - - PhysicalDeviceExternalBufferInfo & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkPhysicalDeviceExternalBufferInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalBufferInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( usage == rhs.usage ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceExternalBufferInfo::sType; - }; - static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceExternalFenceInfo - { - protected: - PhysicalDeviceExternalFenceInfo( vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) - : handleType( handleType_ ) - {} - - PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; - const void* pNext = nullptr; - vk::ExternalFenceHandleTypeFlagBits handleType; - }; - static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceExternalFenceInfo : public layout::PhysicalDeviceExternalFenceInfo - { - PhysicalDeviceExternalFenceInfo( vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) - : layout::PhysicalDeviceExternalFenceInfo( handleType_ ) - {} - - PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) - : layout::PhysicalDeviceExternalFenceInfo( rhs ) - {} - - PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalFenceInfo & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkPhysicalDeviceExternalFenceInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalFenceInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceExternalFenceInfo::sType; - }; - static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceExternalImageFormatInfo - { - protected: - PhysicalDeviceExternalImageFormatInfo( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : handleType( handleType_ ) - {} - - PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; - const void* pNext = nullptr; - vk::ExternalMemoryHandleTypeFlagBits handleType; - }; - static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceExternalImageFormatInfo : public layout::PhysicalDeviceExternalImageFormatInfo - { - PhysicalDeviceExternalImageFormatInfo( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) - : layout::PhysicalDeviceExternalImageFormatInfo( handleType_ ) - {} - - PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) - : layout::PhysicalDeviceExternalImageFormatInfo( rhs ) - {} - - PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalImageFormatInfo & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkPhysicalDeviceExternalImageFormatInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalImageFormatInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceExternalImageFormatInfo::sType; - }; - static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceExternalMemoryHostPropertiesEXT - { - protected: - PhysicalDeviceExternalMemoryHostPropertiesEXT( vk::DeviceSize minImportedHostPointerAlignment_ = 0 ) - : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) - {} - - PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; - void* pNext = nullptr; - vk::DeviceSize minImportedHostPointerAlignment; - }; - static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceExternalMemoryHostPropertiesEXT : public layout::PhysicalDeviceExternalMemoryHostPropertiesEXT - { - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); - } - - bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceExternalMemoryHostPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceExternalSemaphoreInfo - { - protected: - PhysicalDeviceExternalSemaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) - : handleType( handleType_ ) - {} - - PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; - const void* pNext = nullptr; - vk::ExternalSemaphoreHandleTypeFlagBits handleType; - }; - static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceExternalSemaphoreInfo : public layout::PhysicalDeviceExternalSemaphoreInfo - { - PhysicalDeviceExternalSemaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) - : layout::PhysicalDeviceExternalSemaphoreInfo( handleType_ ) - {} - - PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) - : layout::PhysicalDeviceExternalSemaphoreInfo( rhs ) - {} - - PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceExternalSemaphoreInfo & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkPhysicalDeviceExternalSemaphoreInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceExternalSemaphoreInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceExternalSemaphoreInfo::sType; - }; - static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceFeatures2 - { - protected: - PhysicalDeviceFeatures2( vk::PhysicalDeviceFeatures features_ = vk::PhysicalDeviceFeatures() ) - : features( features_ ) - {} - - PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceFeatures2; - void* pNext = nullptr; - vk::PhysicalDeviceFeatures features; - }; - static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceFeatures2 : public layout::PhysicalDeviceFeatures2 - { - PhysicalDeviceFeatures2( vk::PhysicalDeviceFeatures features_ = vk::PhysicalDeviceFeatures() ) - : layout::PhysicalDeviceFeatures2( features_ ) - {} - - PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) - : layout::PhysicalDeviceFeatures2( rhs ) - {} - - PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFeatures2 & setFeatures( vk::PhysicalDeviceFeatures features_ ) - { - features = features_; - return *this; - } - - operator VkPhysicalDeviceFeatures2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFeatures2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFeatures2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( features == rhs.features ); - } - - bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceFeatures2::sType; - }; - static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceFloatControlsPropertiesKHR - { - protected: - PhysicalDeviceFloatControlsPropertiesKHR( vk::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence_ = vk::ShaderFloatControlsIndependenceKHR::e32BitOnly, - vk::ShaderFloatControlsIndependenceKHR roundingModeIndependence_ = vk::ShaderFloatControlsIndependenceKHR::e32BitOnly, - vk::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = 0, - vk::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = 0, - vk::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = 0, - vk::Bool32 shaderDenormPreserveFloat16_ = 0, - vk::Bool32 shaderDenormPreserveFloat32_ = 0, - vk::Bool32 shaderDenormPreserveFloat64_ = 0, - vk::Bool32 shaderDenormFlushToZeroFloat16_ = 0, - vk::Bool32 shaderDenormFlushToZeroFloat32_ = 0, - vk::Bool32 shaderDenormFlushToZeroFloat64_ = 0, - vk::Bool32 shaderRoundingModeRTEFloat16_ = 0, - vk::Bool32 shaderRoundingModeRTEFloat32_ = 0, - vk::Bool32 shaderRoundingModeRTEFloat64_ = 0, - vk::Bool32 shaderRoundingModeRTZFloat16_ = 0, - vk::Bool32 shaderRoundingModeRTZFloat32_ = 0, - vk::Bool32 shaderRoundingModeRTZFloat64_ = 0 ) - : denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( 
shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) - {} - - PhysicalDeviceFloatControlsPropertiesKHR( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFloatControlsPropertiesKHR& operator=( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceFloatControlsPropertiesKHR; - void* pNext = nullptr; - vk::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence; - vk::ShaderFloatControlsIndependenceKHR roundingModeIndependence; - vk::Bool32 shaderSignedZeroInfNanPreserveFloat16; - vk::Bool32 shaderSignedZeroInfNanPreserveFloat32; - vk::Bool32 shaderSignedZeroInfNanPreserveFloat64; - vk::Bool32 shaderDenormPreserveFloat16; - vk::Bool32 shaderDenormPreserveFloat32; - vk::Bool32 shaderDenormPreserveFloat64; - vk::Bool32 shaderDenormFlushToZeroFloat16; - vk::Bool32 shaderDenormFlushToZeroFloat32; - vk::Bool32 shaderDenormFlushToZeroFloat64; - vk::Bool32 shaderRoundingModeRTEFloat16; - vk::Bool32 shaderRoundingModeRTEFloat32; - vk::Bool32 shaderRoundingModeRTEFloat64; - vk::Bool32 shaderRoundingModeRTZFloat16; - vk::Bool32 shaderRoundingModeRTZFloat32; - vk::Bool32 shaderRoundingModeRTZFloat64; - }; - static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceFloatControlsPropertiesKHR : public layout::PhysicalDeviceFloatControlsPropertiesKHR - { - operator VkPhysicalDeviceFloatControlsPropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFloatControlsPropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) - && ( roundingModeIndependence == rhs.roundingModeIndependence ) - && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) - && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) - && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) - && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) - && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) - && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) - && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) - && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) - && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) - && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) - && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) - && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) - && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) - && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) - && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); - } - - bool operator!=( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const - { - return 
!operator==( rhs ); - } - - private: - using layout::PhysicalDeviceFloatControlsPropertiesKHR::sType; - }; - static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceFragmentDensityMapFeaturesEXT - { - protected: - PhysicalDeviceFragmentDensityMapFeaturesEXT( vk::Bool32 fragmentDensityMap_ = 0, - vk::Bool32 fragmentDensityMapDynamic_ = 0, - vk::Bool32 fragmentDensityMapNonSubsampledImages_ = 0 ) - : fragmentDensityMap( fragmentDensityMap_ ) - , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) - , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) - {} - - PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 fragmentDensityMap; - vk::Bool32 fragmentDensityMapDynamic; - vk::Bool32 fragmentDensityMapNonSubsampledImages; - }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceFragmentDensityMapFeaturesEXT : public layout::PhysicalDeviceFragmentDensityMapFeaturesEXT - { - operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentDensityMap == rhs.fragmentDensityMap ) - && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) - && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); - } - - bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceFragmentDensityMapFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceFragmentDensityMapPropertiesEXT - { - protected: - PhysicalDeviceFragmentDensityMapPropertiesEXT( vk::Extent2D minFragmentDensityTexelSize_ = vk::Extent2D(), - vk::Extent2D maxFragmentDensityTexelSize_ = vk::Extent2D(), - vk::Bool32 fragmentDensityInvocations_ = 0 ) - : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) - , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) - , fragmentDensityInvocations( fragmentDensityInvocations_ ) - {} - - PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; - void* pNext = nullptr; - vk::Extent2D minFragmentDensityTexelSize; - vk::Extent2D maxFragmentDensityTexelSize; - vk::Bool32 fragmentDensityInvocations; - }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceFragmentDensityMapPropertiesEXT : public layout::PhysicalDeviceFragmentDensityMapPropertiesEXT - { - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) - && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) - && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); - } - - bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceFragmentDensityMapPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV - { - protected: - PhysicalDeviceFragmentShaderBarycentricFeaturesNV( vk::Bool32 fragmentShaderBarycentric_ = 0 ) - : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) - {} - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; - void* pNext = nullptr; - vk::Bool32 fragmentShaderBarycentric; - }; - static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV : public layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV - { - PhysicalDeviceFragmentShaderBarycentricFeaturesNV( vk::Bool32 fragmentShaderBarycentric_ = 0 ) - : layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV( fragmentShaderBarycentric_ ) - {} - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) - : layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV( rhs ) - {} - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( vk::Bool32 fragmentShaderBarycentric_ ) - { - fragmentShaderBarycentric = fragmentShaderBarycentric_; - return *this; - } - - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); - } - - bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT - { - protected: - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( vk::Bool32 fragmentShaderSampleInterlock_ = 0, - vk::Bool32 fragmentShaderPixelInterlock_ = 0, - vk::Bool32 fragmentShaderShadingRateInterlock_ = 0 ) - : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) - , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) - , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) - {} - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 fragmentShaderSampleInterlock; - vk::Bool32 fragmentShaderPixelInterlock; - vk::Bool32 fragmentShaderShadingRateInterlock; - }; - static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT : public layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT - { - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( vk::Bool32 fragmentShaderSampleInterlock_ = 0, - vk::Bool32 fragmentShaderPixelInterlock_ = 0, - vk::Bool32 fragmentShaderShadingRateInterlock_ = 0 ) - : layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT( fragmentShaderSampleInterlock_, fragmentShaderPixelInterlock_, fragmentShaderShadingRateInterlock_ ) - {} - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) - : layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT( rhs ) - {} - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( vk::Bool32 fragmentShaderSampleInterlock_ ) - { - fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( vk::Bool32 fragmentShaderPixelInterlock_ ) - { - fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; - return *this; - } - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( vk::Bool32 fragmentShaderShadingRateInterlock_ ) - { - fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; - return *this; - } - - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) - && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) - && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); - } - - bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceGroupProperties - { - protected: - PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = 0, - std::array const& physicalDevices_ = { { vk::PhysicalDevice() } }, - vk::Bool32 subsetAllocation_ = 0 ) - : physicalDeviceCount( physicalDeviceCount_ ) - , subsetAllocation( subsetAllocation_ ) - { - memcpy( &physicalDevices, physicalDevices_.data(), VK_MAX_DEVICE_GROUP_SIZE * sizeof( vk::PhysicalDevice ) ); - - } - - PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; - void* pNext = nullptr; - uint32_t physicalDeviceCount; - vk::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; - vk::Bool32 subsetAllocation; - }; - static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceGroupProperties : public layout::PhysicalDeviceGroupProperties - { - operator VkPhysicalDeviceGroupProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceGroupProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceGroupProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( physicalDeviceCount == rhs.physicalDeviceCount ) - && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( vk::PhysicalDevice ) ) == 0 ) - && ( subsetAllocation == rhs.subsetAllocation ); - } - - bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceGroupProperties::sType; - }; - static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceHostQueryResetFeaturesEXT - { - protected: - PhysicalDeviceHostQueryResetFeaturesEXT( vk::Bool32 hostQueryReset_ = 0 ) - : hostQueryReset( hostQueryReset_ ) - {} - - PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 hostQueryReset; - }; - static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceHostQueryResetFeaturesEXT : public layout::PhysicalDeviceHostQueryResetFeaturesEXT - { - PhysicalDeviceHostQueryResetFeaturesEXT( vk::Bool32 hostQueryReset_ = 0 ) - : layout::PhysicalDeviceHostQueryResetFeaturesEXT( hostQueryReset_ ) - {} - - PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) - : layout::PhysicalDeviceHostQueryResetFeaturesEXT( rhs ) - {} - - PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceHostQueryResetFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceHostQueryResetFeaturesEXT & setHostQueryReset( vk::Bool32 hostQueryReset_ ) - { - hostQueryReset = hostQueryReset_; - return *this; - } - - operator VkPhysicalDeviceHostQueryResetFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceHostQueryResetFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( hostQueryReset == rhs.hostQueryReset ); - } - - bool operator!=( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceHostQueryResetFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceIDProperties - { - protected: - PhysicalDeviceIDProperties( std::array const& deviceUUID_ = { { 0 } }, - std::array const& driverUUID_ = { { 0 } }, - std::array const& deviceLUID_ = { { 0 } }, - uint32_t deviceNodeMask_ = 0, - vk::Bool32 deviceLUIDValid_ = 0 ) - : deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) - { - memcpy( &deviceUUID, deviceUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) ); - memcpy( &driverUUID, driverUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) ); - memcpy( &deviceLUID, deviceLUID_.data(), VK_LUID_SIZE * sizeof( uint8_t ) ); - - } - - PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceIdProperties; - void* pNext = nullptr; - uint8_t deviceUUID[VK_UUID_SIZE]; - uint8_t driverUUID[VK_UUID_SIZE]; - uint8_t deviceLUID[VK_LUID_SIZE]; - uint32_t deviceNodeMask; - vk::Bool32 deviceLUIDValid; - }; - static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceIDProperties : public layout::PhysicalDeviceIDProperties - { - operator VkPhysicalDeviceIDProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceIDProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceIDProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( deviceNodeMask == rhs.deviceNodeMask ) - && ( deviceLUIDValid == rhs.deviceLUIDValid ); - } - - bool operator!=( PhysicalDeviceIDProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceIDProperties::sType; - }; - static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceImageDrmFormatModifierInfoEXT - { - protected: - PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0, - vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr ) - : drmFormatModifier( drmFormatModifier_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - {} - - PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; - const void* pNext = nullptr; - uint64_t drmFormatModifier; - vk::SharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - }; - static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceImageDrmFormatModifierInfoEXT : public layout::PhysicalDeviceImageDrmFormatModifierInfoEXT - { - PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0, - vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr ) - : layout::PhysicalDeviceImageDrmFormatModifierInfoEXT( drmFormatModifier_, sharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_ ) - {} - - PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) - : layout::PhysicalDeviceImageDrmFormatModifierInfoEXT( rhs ) - {} - - PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( vk::SharingMode sharingMode_ ) - { - sharingMode = sharingMode_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( drmFormatModifier == rhs.drmFormatModifier ) - && ( sharingMode == rhs.sharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); - } - - bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceImageDrmFormatModifierInfoEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceImageFormatInfo2 - { - protected: - PhysicalDeviceImageFormatInfo2( vk::Format format_ = vk::Format::eUndefined, - vk::ImageType type_ = vk::ImageType::e1D, - vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal, - vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(), - vk::ImageCreateFlags flags_ = vk::ImageCreateFlags() ) - : format( format_ ) - , type( type_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , flags( flags_ ) - {} - - PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; - const void* pNext = nullptr; - vk::Format format; - vk::ImageType type; - vk::ImageTiling tiling; - vk::ImageUsageFlags usage; - vk::ImageCreateFlags flags; - }; - static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceImageFormatInfo2 : public layout::PhysicalDeviceImageFormatInfo2 - { - PhysicalDeviceImageFormatInfo2( vk::Format format_ = vk::Format::eUndefined, - vk::ImageType type_ = vk::ImageType::e1D, - vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal, - vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(), - vk::ImageCreateFlags flags_ = vk::ImageCreateFlags() ) - : layout::PhysicalDeviceImageFormatInfo2( format_, type_, tiling_, usage_, flags_ ) - {} - - PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) - : layout::PhysicalDeviceImageFormatInfo2( rhs ) - {} - - PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceImageFormatInfo2 & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - PhysicalDeviceImageFormatInfo2 & setType( vk::ImageType type_ ) - { - type = type_; - return *this; - } - - PhysicalDeviceImageFormatInfo2 & setTiling( vk::ImageTiling tiling_ ) - { - tiling = tiling_; - return *this; - } - - PhysicalDeviceImageFormatInfo2 & setUsage( vk::ImageUsageFlags usage_ ) - { - usage = usage_; - return *this; - } - - PhysicalDeviceImageFormatInfo2 & setFlags( vk::ImageCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - operator VkPhysicalDeviceImageFormatInfo2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceImageFormatInfo2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( type == rhs.type ) - && ( tiling == rhs.tiling ) - && ( usage == rhs.usage ) - && ( flags == rhs.flags ); - } - - bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceImageFormatInfo2::sType; - }; - static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceImageViewImageFormatInfoEXT - { - protected: - PhysicalDeviceImageViewImageFormatInfoEXT( vk::ImageViewType imageViewType_ = vk::ImageViewType::e1D ) - : imageViewType( imageViewType_ ) - {} - - PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; - void* pNext = nullptr; - vk::ImageViewType imageViewType; - }; - static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceImageViewImageFormatInfoEXT : public layout::PhysicalDeviceImageViewImageFormatInfoEXT - { - PhysicalDeviceImageViewImageFormatInfoEXT( vk::ImageViewType imageViewType_ = vk::ImageViewType::e1D ) - : layout::PhysicalDeviceImageViewImageFormatInfoEXT( imageViewType_ ) - {} - - PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) - : layout::PhysicalDeviceImageViewImageFormatInfoEXT( rhs ) - {} - - PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( vk::ImageViewType imageViewType_ ) - { - imageViewType = imageViewType_; - return *this; - } - - operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( imageViewType == rhs.imageViewType ); - } - - bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceImageViewImageFormatInfoEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
-
-  namespace layout
-  {
-    struct PhysicalDeviceImagelessFramebufferFeaturesKHR
-    {
-    protected:
-      PhysicalDeviceImagelessFramebufferFeaturesKHR( vk::Bool32 imagelessFramebuffer_ = 0 )
-        : imagelessFramebuffer( imagelessFramebuffer_ )
-      {}
-
-      PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>(this) = rhs;
-      }
-
-      PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR;
-      void* pNext = nullptr;
-      vk::Bool32 imagelessFramebuffer;
-    };
-    static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "layout struct and wrapper have different size!" );
-  }
-
-  struct PhysicalDeviceImagelessFramebufferFeaturesKHR : public layout::PhysicalDeviceImagelessFramebufferFeaturesKHR
-  {
-    PhysicalDeviceImagelessFramebufferFeaturesKHR( vk::Bool32 imagelessFramebuffer_ = 0 )
-      : layout::PhysicalDeviceImagelessFramebufferFeaturesKHR( imagelessFramebuffer_ )
-    {}
-
-    PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs )
-      : layout::PhysicalDeviceImagelessFramebufferFeaturesKHR( rhs )
-    {}
-
-    PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs )
-    {
-      *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>(this) = rhs;
-      return *this;
-    }
-
-    PhysicalDeviceImagelessFramebufferFeaturesKHR & setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceImagelessFramebufferFeaturesKHR & setImagelessFramebuffer( vk::Bool32 imagelessFramebuffer_ )
-    {
-      imagelessFramebuffer = imagelessFramebuffer_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
-    }
-
-    operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
-    }
-
-    bool operator==( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
-    }
-
-    bool operator!=( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PhysicalDeviceImagelessFramebufferFeaturesKHR::sType;
-  };
-  static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct PhysicalDeviceIndexTypeUint8FeaturesEXT
-    {
-    protected:
-      PhysicalDeviceIndexTypeUint8FeaturesEXT( vk::Bool32 indexTypeUint8_ = 0 )
-        : indexTypeUint8( indexTypeUint8_ )
-      {}
-
-      PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>(this) = rhs;
-      }
-
-      PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
-      void* pNext = nullptr;
-      vk::Bool32 indexTypeUint8;
-    };
-    static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "layout struct and wrapper have different size!" );
-  }
-
-  struct PhysicalDeviceIndexTypeUint8FeaturesEXT : public layout::PhysicalDeviceIndexTypeUint8FeaturesEXT
-  {
-    PhysicalDeviceIndexTypeUint8FeaturesEXT( vk::Bool32 indexTypeUint8_ = 0 )
-      : layout::PhysicalDeviceIndexTypeUint8FeaturesEXT( indexTypeUint8_ )
-    {}
-
-    PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs )
-      : layout::PhysicalDeviceIndexTypeUint8FeaturesEXT( rhs )
-    {}
-
-    PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs )
-    {
-      *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>(this) = rhs;
-      return *this;
-    }
-
-    PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( vk::Bool32 indexTypeUint8_ )
-    {
-      indexTypeUint8 = indexTypeUint8_;
-      return *this;
-    }
-
-    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
-    }
-
-    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( indexTypeUint8 == rhs.indexTypeUint8 );
-    }
-
-    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PhysicalDeviceIndexTypeUint8FeaturesEXT::sType;
-  };
-  static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!"
); - - namespace layout - { - struct PhysicalDeviceInlineUniformBlockFeaturesEXT - { - protected: - PhysicalDeviceInlineUniformBlockFeaturesEXT( vk::Bool32 inlineUniformBlock_ = 0, - vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = 0 ) - : inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) - {} - - PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 inlineUniformBlock; - vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind; - }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceInlineUniformBlockFeaturesEXT : public layout::PhysicalDeviceInlineUniformBlockFeaturesEXT - { - PhysicalDeviceInlineUniformBlockFeaturesEXT( vk::Bool32 inlineUniformBlock_ = 0, - vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = 0 ) - : layout::PhysicalDeviceInlineUniformBlockFeaturesEXT( inlineUniformBlock_, descriptorBindingInlineUniformBlockUpdateAfterBind_ ) - {} - - PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) - : layout::PhysicalDeviceInlineUniformBlockFeaturesEXT( rhs ) - {} - - PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( vk::Bool32 inlineUniformBlock_ ) - { - inlineUniformBlock = inlineUniformBlock_; - return *this; - } - - PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) - { - descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; - return *this; - } - - operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( inlineUniformBlock == rhs.inlineUniformBlock ) - && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); - } - - bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceInlineUniformBlockFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceInlineUniformBlockPropertiesEXT - { - protected: - PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = 0, - uint32_t maxPerStageDescriptorInlineUniformBlocks_ = 0, - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = 0, - uint32_t maxDescriptorSetInlineUniformBlocks_ = 0, - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = 0 ) - : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) - {} - - PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT; - void* pNext = nullptr; - uint32_t maxInlineUniformBlockSize; - uint32_t maxPerStageDescriptorInlineUniformBlocks; - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; - uint32_t maxDescriptorSetInlineUniformBlocks; - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; - }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceInlineUniformBlockPropertiesEXT : public layout::PhysicalDeviceInlineUniformBlockPropertiesEXT - { - operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) - && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) - && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) - && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) - && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); - } - - bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceInlineUniformBlockPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceLimits - { - operator VkPhysicalDeviceLimits const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceLimits &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceLimits const& rhs ) const - { - return ( maxImageDimension1D == rhs.maxImageDimension1D ) - && ( maxImageDimension2D == rhs.maxImageDimension2D ) - && ( maxImageDimension3D == rhs.maxImageDimension3D ) - && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) - && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) - && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) - && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) - && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) - && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) - && ( bufferImageGranularity == rhs.bufferImageGranularity ) - && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) - && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) - && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) - && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) - && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) - && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) - && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) - && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) - && ( maxPerStageResources == rhs.maxPerStageResources ) - && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) - && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) - && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) - && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) - && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) - && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) - && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) - && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) - && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) - && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) - && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) - && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) - && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) - && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) - && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) - && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) - && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) - && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) - && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) - && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) - && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) - && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) - && ( 
maxGeometryInputComponents == rhs.maxGeometryInputComponents ) - && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) - && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) - && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) - && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) - && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) - && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) - && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) - && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) - && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 ) - && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) - && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) - && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) - && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) - && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) - && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) - && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) - && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) - && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) - && ( maxViewports == rhs.maxViewports ) - && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 ) - && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 ) - && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) - && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) - && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) - && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) - && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) - && ( minTexelOffset == rhs.minTexelOffset ) - && ( maxTexelOffset == rhs.maxTexelOffset ) - && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) - && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) - && ( minInterpolationOffset == rhs.minInterpolationOffset ) - && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) - && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) - && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) - && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) - && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) - && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) - && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) - && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) - && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) - && ( maxColorAttachments == rhs.maxColorAttachments ) - && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) - && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) - && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) - && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) - && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) - && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) - && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) - && ( timestampPeriod == rhs.timestampPeriod ) - && ( maxClipDistances == rhs.maxClipDistances ) - && ( maxCullDistances == rhs.maxCullDistances ) - && ( 
maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) - && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) - && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 ) - && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 ) - && ( pointSizeGranularity == rhs.pointSizeGranularity ) - && ( lineWidthGranularity == rhs.lineWidthGranularity ) - && ( strictLines == rhs.strictLines ) - && ( standardSampleLocations == rhs.standardSampleLocations ) - && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) - && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) - && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); - } - - bool operator!=( PhysicalDeviceLimits const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t maxImageDimension1D; - uint32_t maxImageDimension2D; - uint32_t maxImageDimension3D; - uint32_t maxImageDimensionCube; - uint32_t maxImageArrayLayers; - uint32_t maxTexelBufferElements; - uint32_t maxUniformBufferRange; - uint32_t maxStorageBufferRange; - uint32_t maxPushConstantsSize; - uint32_t maxMemoryAllocationCount; - uint32_t maxSamplerAllocationCount; - vk::DeviceSize bufferImageGranularity; - vk::DeviceSize sparseAddressSpaceSize; - uint32_t maxBoundDescriptorSets; - uint32_t maxPerStageDescriptorSamplers; - uint32_t maxPerStageDescriptorUniformBuffers; - uint32_t maxPerStageDescriptorStorageBuffers; - uint32_t maxPerStageDescriptorSampledImages; - uint32_t maxPerStageDescriptorStorageImages; - uint32_t maxPerStageDescriptorInputAttachments; - uint32_t maxPerStageResources; - uint32_t maxDescriptorSetSamplers; - uint32_t maxDescriptorSetUniformBuffers; - uint32_t maxDescriptorSetUniformBuffersDynamic; - uint32_t maxDescriptorSetStorageBuffers; - uint32_t maxDescriptorSetStorageBuffersDynamic; - uint32_t maxDescriptorSetSampledImages; - uint32_t maxDescriptorSetStorageImages; - uint32_t maxDescriptorSetInputAttachments; - uint32_t maxVertexInputAttributes; - uint32_t maxVertexInputBindings; - uint32_t maxVertexInputAttributeOffset; - uint32_t maxVertexInputBindingStride; - uint32_t maxVertexOutputComponents; - uint32_t maxTessellationGenerationLevel; - uint32_t maxTessellationPatchSize; - uint32_t maxTessellationControlPerVertexInputComponents; - uint32_t maxTessellationControlPerVertexOutputComponents; - uint32_t maxTessellationControlPerPatchOutputComponents; - uint32_t maxTessellationControlTotalOutputComponents; - uint32_t maxTessellationEvaluationInputComponents; - uint32_t maxTessellationEvaluationOutputComponents; - uint32_t maxGeometryShaderInvocations; - uint32_t maxGeometryInputComponents; - uint32_t maxGeometryOutputComponents; - uint32_t maxGeometryOutputVertices; - uint32_t maxGeometryTotalOutputComponents; - uint32_t maxFragmentInputComponents; - uint32_t maxFragmentOutputAttachments; - uint32_t maxFragmentDualSrcAttachments; - uint32_t maxFragmentCombinedOutputResources; - uint32_t maxComputeSharedMemorySize; - uint32_t maxComputeWorkGroupCount[3]; - uint32_t maxComputeWorkGroupInvocations; - uint32_t maxComputeWorkGroupSize[3]; - uint32_t subPixelPrecisionBits; - uint32_t subTexelPrecisionBits; - uint32_t mipmapPrecisionBits; - uint32_t maxDrawIndexedIndexValue; - uint32_t maxDrawIndirectCount; - float maxSamplerLodBias; - float maxSamplerAnisotropy; - uint32_t maxViewports; - uint32_t maxViewportDimensions[2]; - float viewportBoundsRange[2]; - uint32_t viewportSubPixelBits; - size_t minMemoryMapAlignment; - 
vk::DeviceSize minTexelBufferOffsetAlignment; - vk::DeviceSize minUniformBufferOffsetAlignment; - vk::DeviceSize minStorageBufferOffsetAlignment; - int32_t minTexelOffset; - uint32_t maxTexelOffset; - int32_t minTexelGatherOffset; - uint32_t maxTexelGatherOffset; - float minInterpolationOffset; - float maxInterpolationOffset; - uint32_t subPixelInterpolationOffsetBits; - uint32_t maxFramebufferWidth; - uint32_t maxFramebufferHeight; - uint32_t maxFramebufferLayers; - vk::SampleCountFlags framebufferColorSampleCounts; - vk::SampleCountFlags framebufferDepthSampleCounts; - vk::SampleCountFlags framebufferStencilSampleCounts; - vk::SampleCountFlags framebufferNoAttachmentsSampleCounts; - uint32_t maxColorAttachments; - vk::SampleCountFlags sampledImageColorSampleCounts; - vk::SampleCountFlags sampledImageIntegerSampleCounts; - vk::SampleCountFlags sampledImageDepthSampleCounts; - vk::SampleCountFlags sampledImageStencilSampleCounts; - vk::SampleCountFlags storageImageSampleCounts; - uint32_t maxSampleMaskWords; - vk::Bool32 timestampComputeAndGraphics; - float timestampPeriod; - uint32_t maxClipDistances; - uint32_t maxCullDistances; - uint32_t maxCombinedClipAndCullDistances; - uint32_t discreteQueuePriorities; - float pointSizeRange[2]; - float lineWidthRange[2]; - float pointSizeGranularity; - float lineWidthGranularity; - vk::Bool32 strictLines; - vk::Bool32 standardSampleLocations; - vk::DeviceSize optimalBufferCopyOffsetAlignment; - vk::DeviceSize optimalBufferCopyRowPitchAlignment; - vk::DeviceSize nonCoherentAtomSize; - }; - static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceLineRasterizationFeaturesEXT - { - protected: - PhysicalDeviceLineRasterizationFeaturesEXT( vk::Bool32 rectangularLines_ = 0, - vk::Bool32 bresenhamLines_ = 0, - vk::Bool32 smoothLines_ = 0, - vk::Bool32 stippledRectangularLines_ = 0, - vk::Bool32 stippledBresenhamLines_ = 0, - vk::Bool32 stippledSmoothLines_ = 0 ) - : rectangularLines( rectangularLines_ ) - , bresenhamLines( bresenhamLines_ ) - , smoothLines( smoothLines_ ) - , stippledRectangularLines( stippledRectangularLines_ ) - , stippledBresenhamLines( stippledBresenhamLines_ ) - , stippledSmoothLines( stippledSmoothLines_ ) - {} - - PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 rectangularLines; - vk::Bool32 bresenhamLines; - vk::Bool32 smoothLines; - vk::Bool32 stippledRectangularLines; - vk::Bool32 stippledBresenhamLines; - vk::Bool32 stippledSmoothLines; - }; - static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceLineRasterizationFeaturesEXT : public layout::PhysicalDeviceLineRasterizationFeaturesEXT - { - PhysicalDeviceLineRasterizationFeaturesEXT( vk::Bool32 rectangularLines_ = 0, - vk::Bool32 bresenhamLines_ = 0, - vk::Bool32 smoothLines_ = 0, - vk::Bool32 stippledRectangularLines_ = 0, - vk::Bool32 stippledBresenhamLines_ = 0, - vk::Bool32 stippledSmoothLines_ = 0 ) - : layout::PhysicalDeviceLineRasterizationFeaturesEXT( rectangularLines_, bresenhamLines_, smoothLines_, stippledRectangularLines_, stippledBresenhamLines_, stippledSmoothLines_ ) - {} - - PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) - : layout::PhysicalDeviceLineRasterizationFeaturesEXT( rhs ) - {} - - PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( vk::Bool32 rectangularLines_ ) - { - rectangularLines = rectangularLines_; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( vk::Bool32 bresenhamLines_ ) - { - bresenhamLines = bresenhamLines_; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( vk::Bool32 smoothLines_ ) - { - smoothLines = smoothLines_; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( vk::Bool32 stippledRectangularLines_ ) - { - stippledRectangularLines = stippledRectangularLines_; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( vk::Bool32 stippledBresenhamLines_ ) - { - stippledBresenhamLines = stippledBresenhamLines_; - return *this; - } - - PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( vk::Bool32 stippledSmoothLines_ ) - { - stippledSmoothLines = stippledSmoothLines_; - return *this; - } - - operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( rectangularLines == rhs.rectangularLines ) - && ( bresenhamLines == rhs.bresenhamLines ) - && ( smoothLines == rhs.smoothLines ) - && ( stippledRectangularLines == rhs.stippledRectangularLines ) - && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) - && ( stippledSmoothLines == rhs.stippledSmoothLines ); - } - - bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceLineRasterizationFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceLineRasterizationPropertiesEXT - { - protected: - PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = 0 ) - : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) - {} - - PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; - void* pNext = nullptr; - uint32_t lineSubPixelPrecisionBits; - }; - static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceLineRasterizationPropertiesEXT : public layout::PhysicalDeviceLineRasterizationPropertiesEXT - { - operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); - } - - bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceLineRasterizationPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceMaintenance3Properties - { - protected: - PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = 0, - vk::DeviceSize maxMemoryAllocationSize_ = 0 ) - : maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) - {} - - PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; - void* pNext = nullptr; - uint32_t maxPerSetDescriptors; - vk::DeviceSize maxMemoryAllocationSize; - }; - static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "layout struct and wrapper have different size!" 
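(Illustrative aside, not part of the patch.) The line-rasterization wrappers removed above are normally consumed by chaining them into the Vulkan 1.1 "2" queries via pNext. A minimal sketch, assuming a valid vk::PhysicalDevice and VK_EXT_line_rasterization support; the helper name is hypothetical.

// Sketch only; assumes Vulkan 1.1 and VK_EXT_line_rasterization are available.
#include <vulkan/vulkan.hpp>

bool SupportsBresenhamLines(vk::PhysicalDevice physical_device)
{
  vk::PhysicalDeviceLineRasterizationFeaturesEXT line_features;
  vk::PhysicalDeviceFeatures2 features2;
  features2.pNext = &line_features;          // chain the EXT struct behind the core struct
  physical_device.getFeatures2(&features2);  // fills both structs

  vk::PhysicalDeviceLineRasterizationPropertiesEXT line_props;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &line_props;
  physical_device.getProperties2(&props2);   // e.g. line_props.lineSubPixelPrecisionBits

  return line_features.bresenhamLines == VK_TRUE;
}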
); - } - - struct PhysicalDeviceMaintenance3Properties : public layout::PhysicalDeviceMaintenance3Properties - { - operator VkPhysicalDeviceMaintenance3Properties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMaintenance3Properties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) - && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); - } - - bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMaintenance3Properties::sType; - }; - static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceMemoryBudgetPropertiesEXT - { - protected: - PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const& heapBudget_ = { { 0 } }, - std::array const& heapUsage_ = { { 0 } } ) - { - memcpy( &heapBudget, heapBudget_.data(), VK_MAX_MEMORY_HEAPS * sizeof( vk::DeviceSize ) ); - memcpy( &heapUsage, heapUsage_.data(), VK_MAX_MEMORY_HEAPS * sizeof( vk::DeviceSize ) ); - - } - - PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; - void* pNext = nullptr; - vk::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; - vk::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; - }; - static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceMemoryBudgetPropertiesEXT : public layout::PhysicalDeviceMemoryBudgetPropertiesEXT - { - operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( vk::DeviceSize ) ) == 0 ) - && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( vk::DeviceSize ) ) == 0 ); - } - - bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMemoryBudgetPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
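(Illustrative aside, not part of the patch.) PhysicalDeviceMemoryBudgetPropertiesEXT, removed just above, is only ever filled through a getMemoryProperties2 chain. A minimal sketch, assuming VK_EXT_memory_budget is supported; the function name is hypothetical.

// Sketch only; assumes VK_EXT_memory_budget support.
#include <vulkan/vulkan.hpp>
#include <cstdio>

void PrintHeapBudgets(vk::PhysicalDevice physical_device)
{
  vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget;
  vk::PhysicalDeviceMemoryProperties2 mem_props2;
  mem_props2.pNext = &budget;
  physical_device.getMemoryProperties2(&mem_props2);

  for (uint32_t i = 0; i < mem_props2.memoryProperties.memoryHeapCount; ++i)
  {
    std::printf("heap %u: usage %llu / budget %llu bytes\n", i,
                static_cast<unsigned long long>(budget.heapUsage[i]),
                static_cast<unsigned long long>(budget.heapBudget[i]));
  }
}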
); - - namespace layout - { - struct PhysicalDeviceMemoryPriorityFeaturesEXT - { - protected: - PhysicalDeviceMemoryPriorityFeaturesEXT( vk::Bool32 memoryPriority_ = 0 ) - : memoryPriority( memoryPriority_ ) - {} - - PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 memoryPriority; - }; - static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceMemoryPriorityFeaturesEXT : public layout::PhysicalDeviceMemoryPriorityFeaturesEXT - { - PhysicalDeviceMemoryPriorityFeaturesEXT( vk::Bool32 memoryPriority_ = 0 ) - : layout::PhysicalDeviceMemoryPriorityFeaturesEXT( memoryPriority_ ) - {} - - PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) - : layout::PhysicalDeviceMemoryPriorityFeaturesEXT( rhs ) - {} - - PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( vk::Bool32 memoryPriority_ ) - { - memoryPriority = memoryPriority_; - return *this; - } - - operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryPriority == rhs.memoryPriority ); - } - - bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMemoryPriorityFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceMemoryProperties - { - operator VkPhysicalDeviceMemoryProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMemoryProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const - { - return ( memoryTypeCount == rhs.memoryTypeCount ) - && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( vk::MemoryType ) ) == 0 ) - && ( memoryHeapCount == rhs.memoryHeapCount ) - && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( vk::MemoryHeap ) ) == 0 ); - } - - bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t memoryTypeCount; - vk::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; - uint32_t memoryHeapCount; - vk::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; - }; - static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceMemoryProperties2 - { - protected: - PhysicalDeviceMemoryProperties2( vk::PhysicalDeviceMemoryProperties memoryProperties_ = vk::PhysicalDeviceMemoryProperties() ) - : memoryProperties( memoryProperties_ ) - {} - - PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; - void* pNext = nullptr; - vk::PhysicalDeviceMemoryProperties memoryProperties; - }; - static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceMemoryProperties2 : public layout::PhysicalDeviceMemoryProperties2 - { - operator VkPhysicalDeviceMemoryProperties2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMemoryProperties2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryProperties == rhs.memoryProperties ); - } - - bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMemoryProperties2::sType; - }; - static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
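(Illustrative aside, not part of the patch.) The PhysicalDeviceMemoryProperties wrapper above exists mainly for the classic "find a memory type" lookup: pick an index allowed by the resource's memoryTypeBits that also carries the requested property flags. A minimal sketch; the helper name is illustrative.

// Sketch only; the usual memory-type selection built on getMemoryProperties().
#include <vulkan/vulkan.hpp>
#include <stdexcept>

uint32_t FindMemoryType(vk::PhysicalDevice physical_device, uint32_t type_bits,
                        vk::MemoryPropertyFlags required)
{
  const vk::PhysicalDeviceMemoryProperties mem_props = physical_device.getMemoryProperties();
  for (uint32_t i = 0; i < mem_props.memoryTypeCount; ++i)
  {
    const bool type_allowed = (type_bits & (1u << i)) != 0;
    const bool has_flags = (mem_props.memoryTypes[i].propertyFlags & required) == required;
    if (type_allowed && has_flags)
      return i;
  }
  throw std::runtime_error("no suitable memory type");
}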
); - - namespace layout - { - struct PhysicalDeviceMeshShaderFeaturesNV - { - protected: - PhysicalDeviceMeshShaderFeaturesNV( vk::Bool32 taskShader_ = 0, - vk::Bool32 meshShader_ = 0 ) - : taskShader( taskShader_ ) - , meshShader( meshShader_ ) - {} - - PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; - void* pNext = nullptr; - vk::Bool32 taskShader; - vk::Bool32 meshShader; - }; - static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceMeshShaderFeaturesNV : public layout::PhysicalDeviceMeshShaderFeaturesNV - { - PhysicalDeviceMeshShaderFeaturesNV( vk::Bool32 taskShader_ = 0, - vk::Bool32 meshShader_ = 0 ) - : layout::PhysicalDeviceMeshShaderFeaturesNV( taskShader_, meshShader_ ) - {} - - PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) - : layout::PhysicalDeviceMeshShaderFeaturesNV( rhs ) - {} - - PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( vk::Bool32 taskShader_ ) - { - taskShader = taskShader_; - return *this; - } - - PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( vk::Bool32 meshShader_ ) - { - meshShader = meshShader_; - return *this; - } - - operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMeshShaderFeaturesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( taskShader == rhs.taskShader ) - && ( meshShader == rhs.meshShader ); - } - - bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMeshShaderFeaturesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceMeshShaderPropertiesNV - { - protected: - PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = 0, - uint32_t maxTaskWorkGroupInvocations_ = 0, - std::array const& maxTaskWorkGroupSize_ = { { 0 } }, - uint32_t maxTaskTotalMemorySize_ = 0, - uint32_t maxTaskOutputCount_ = 0, - uint32_t maxMeshWorkGroupInvocations_ = 0, - std::array const& maxMeshWorkGroupSize_ = { { 0 } }, - uint32_t maxMeshTotalMemorySize_ = 0, - uint32_t maxMeshOutputVertices_ = 0, - uint32_t maxMeshOutputPrimitives_ = 0, - uint32_t maxMeshMultiviewViewCount_ = 0, - uint32_t meshOutputPerVertexGranularity_ = 0, - uint32_t meshOutputPerPrimitiveGranularity_ = 0 ) - : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) - , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) - , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) - , maxTaskOutputCount( maxTaskOutputCount_ ) - , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) - , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) - , maxMeshOutputVertices( maxMeshOutputVertices_ ) - , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) - , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) - , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) - , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) - { - memcpy( &maxTaskWorkGroupSize, maxTaskWorkGroupSize_.data(), 3 * sizeof( uint32_t ) ); - memcpy( &maxMeshWorkGroupSize, maxMeshWorkGroupSize_.data(), 3 * sizeof( uint32_t ) ); - - } - - PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; - void* pNext = nullptr; - uint32_t maxDrawMeshTasksCount; - uint32_t maxTaskWorkGroupInvocations; - uint32_t maxTaskWorkGroupSize[3]; - uint32_t maxTaskTotalMemorySize; - uint32_t maxTaskOutputCount; - uint32_t maxMeshWorkGroupInvocations; - uint32_t maxMeshWorkGroupSize[3]; - uint32_t maxMeshTotalMemorySize; - uint32_t maxMeshOutputVertices; - uint32_t maxMeshOutputPrimitives; - uint32_t maxMeshMultiviewViewCount; - uint32_t meshOutputPerVertexGranularity; - uint32_t meshOutputPerPrimitiveGranularity; - }; - static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceMeshShaderPropertiesNV : public layout::PhysicalDeviceMeshShaderPropertiesNV - { - operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMeshShaderPropertiesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) - && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) - && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) - && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) - && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) - && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) - && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) - && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) - && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) - && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) - && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) - && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) - && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); - } - - bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMeshShaderPropertiesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceMultiviewFeatures - { - protected: - PhysicalDeviceMultiviewFeatures( vk::Bool32 multiview_ = 0, - vk::Bool32 multiviewGeometryShader_ = 0, - vk::Bool32 multiviewTessellationShader_ = 0 ) - : multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) - {} - - PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; - void* pNext = nullptr; - vk::Bool32 multiview; - vk::Bool32 multiviewGeometryShader; - vk::Bool32 multiviewTessellationShader; - }; - static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "layout struct and wrapper have different size!" 
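(Illustrative aside, not part of the patch.) The mesh-shader features/properties pair above follows the same query pattern: both structs are chained into the "2" queries before deciding whether to use the extension. A minimal sketch, assuming VK_NV_mesh_shader is advertised; the helper name is hypothetical.

// Sketch only; assumes VK_NV_mesh_shader support.
#include <vulkan/vulkan.hpp>

bool QueryMeshShaderSupport(vk::PhysicalDevice physical_device, uint32_t* max_output_vertices)
{
  vk::PhysicalDeviceMeshShaderFeaturesNV mesh_features;
  vk::PhysicalDeviceFeatures2 features2;
  features2.pNext = &mesh_features;
  physical_device.getFeatures2(&features2);

  vk::PhysicalDeviceMeshShaderPropertiesNV mesh_props;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &mesh_props;
  physical_device.getProperties2(&props2);

  if (max_output_vertices)
    *max_output_vertices = mesh_props.maxMeshOutputVertices;
  return mesh_features.meshShader == VK_TRUE && mesh_features.taskShader == VK_TRUE;
}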
); - } - - struct PhysicalDeviceMultiviewFeatures : public layout::PhysicalDeviceMultiviewFeatures - { - PhysicalDeviceMultiviewFeatures( vk::Bool32 multiview_ = 0, - vk::Bool32 multiviewGeometryShader_ = 0, - vk::Bool32 multiviewTessellationShader_ = 0 ) - : layout::PhysicalDeviceMultiviewFeatures( multiview_, multiviewGeometryShader_, multiviewTessellationShader_ ) - {} - - PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) - : layout::PhysicalDeviceMultiviewFeatures( rhs ) - {} - - PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceMultiviewFeatures & setMultiview( vk::Bool32 multiview_ ) - { - multiview = multiview_; - return *this; - } - - PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( vk::Bool32 multiviewGeometryShader_ ) - { - multiviewGeometryShader = multiviewGeometryShader_; - return *this; - } - - PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( vk::Bool32 multiviewTessellationShader_ ) - { - multiviewTessellationShader = multiviewTessellationShader_; - return *this; - } - - operator VkPhysicalDeviceMultiviewFeatures const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMultiviewFeatures &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( multiview == rhs.multiview ) - && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) - && ( multiviewTessellationShader == rhs.multiviewTessellationShader ); - } - - bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMultiviewFeatures::sType; - }; - static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX - { - protected: - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( vk::Bool32 perViewPositionAllComponents_ = 0 ) - : perViewPositionAllComponents( perViewPositionAllComponents_ ) - {} - - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - void* pNext = nullptr; - vk::Bool32 perViewPositionAllComponents; - }; - static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : public layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX - { - operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); - } - - bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::sType; - }; - static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceMultiviewProperties - { - protected: - PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = 0, - uint32_t maxMultiviewInstanceIndex_ = 0 ) - : maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) - {} - - PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; - void* pNext = nullptr; - uint32_t maxMultiviewViewCount; - uint32_t maxMultiviewInstanceIndex; - }; - static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceMultiviewProperties : public layout::PhysicalDeviceMultiviewProperties - { - operator VkPhysicalDeviceMultiviewProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceMultiviewProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) - && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); - } - - bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceMultiviewProperties::sType; - }; - static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDevicePCIBusInfoPropertiesEXT - { - protected: - PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = 0, - uint32_t pciBus_ = 0, - uint32_t pciDevice_ = 0, - uint32_t pciFunction_ = 0 ) - : pciDomain( pciDomain_ ) - , pciBus( pciBus_ ) - , pciDevice( pciDevice_ ) - , pciFunction( pciFunction_ ) - {} - - PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; - void* pNext = nullptr; - uint32_t pciDomain; - uint32_t pciBus; - uint32_t pciDevice; - uint32_t pciFunction; - }; - static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDevicePCIBusInfoPropertiesEXT : public layout::PhysicalDevicePCIBusInfoPropertiesEXT - { - operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pciDomain == rhs.pciDomain ) - && ( pciBus == rhs.pciBus ) - && ( pciDevice == rhs.pciDevice ) - && ( pciFunction == rhs.pciFunction ); - } - - bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDevicePCIBusInfoPropertiesEXT::sType; - }; - static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR - { - protected: - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( vk::Bool32 pipelineExecutableInfo_ = 0 ) - : pipelineExecutableInfo( pipelineExecutableInfo_ ) - {} - - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; - void* pNext = nullptr; - vk::Bool32 pipelineExecutableInfo; - }; - static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "layout struct and wrapper have different size!" 
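(Illustrative aside, not part of the patch.) PhysicalDevicePCIBusInfoPropertiesEXT, covered above, is typically used to match a Vulkan device against an OS-level adapter. A minimal sketch, assuming VK_EXT_pci_bus_info support; the function name is hypothetical.

// Sketch only; assumes VK_EXT_pci_bus_info support.
#include <vulkan/vulkan.hpp>
#include <cstdio>

void PrintPciAddress(vk::PhysicalDevice physical_device)
{
  vk::PhysicalDevicePCIBusInfoPropertiesEXT pci_info;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &pci_info;
  physical_device.getProperties2(&props2);
  std::printf("PCI %04x:%02x:%02x.%x\n", pci_info.pciDomain, pci_info.pciBus,
              pci_info.pciDevice, pci_info.pciFunction);
}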
); - } - - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR : public layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR - { - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( vk::Bool32 pipelineExecutableInfo_ = 0 ) - : layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( pipelineExecutableInfo_ ) - {} - - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) - : layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( rhs ) - {} - - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( vk::Bool32 pipelineExecutableInfo_ ) - { - pipelineExecutableInfo = pipelineExecutableInfo_; - return *this; - } - - operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); - } - - bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR::sType; - }; - static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDevicePointClippingProperties - { - protected: - PhysicalDevicePointClippingProperties( vk::PointClippingBehavior pointClippingBehavior_ = vk::PointClippingBehavior::eAllClipPlanes ) - : pointClippingBehavior( pointClippingBehavior_ ) - {} - - PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; - void* pNext = nullptr; - vk::PointClippingBehavior pointClippingBehavior; - }; - static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDevicePointClippingProperties : public layout::PhysicalDevicePointClippingProperties - { - operator VkPhysicalDevicePointClippingProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevicePointClippingProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( pointClippingBehavior == rhs.pointClippingBehavior ); - } - - bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDevicePointClippingProperties::sType; - }; - static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct PhysicalDeviceSparseProperties - { - operator VkPhysicalDeviceSparseProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceSparseProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceSparseProperties const& rhs ) const - { - return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) - && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) - && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) - && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) - && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); - } - - bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Bool32 residencyStandard2DBlockShape; - vk::Bool32 residencyStandard2DMultisampleBlockShape; - vk::Bool32 residencyStandard3DBlockShape; - vk::Bool32 residencyAlignedMipSize; - vk::Bool32 residencyNonResidentStrict; - }; - static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PhysicalDeviceProperties - { - operator VkPhysicalDeviceProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceProperties const& rhs ) const - { - return ( apiVersion == rhs.apiVersion ) - && ( driverVersion == rhs.driverVersion ) - && ( vendorID == rhs.vendorID ) - && ( deviceID == rhs.deviceID ) - && ( deviceType == rhs.deviceType ) - && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 ) - && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) - && ( limits == rhs.limits ) - && ( sparseProperties == rhs.sparseProperties ); - } - - bool operator!=( PhysicalDeviceProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t apiVersion; - uint32_t driverVersion; - uint32_t vendorID; - uint32_t deviceID; - vk::PhysicalDeviceType deviceType; - char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; - uint8_t pipelineCacheUUID[VK_UUID_SIZE]; - vk::PhysicalDeviceLimits limits; - vk::PhysicalDeviceSparseProperties sparseProperties; - }; - static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceProperties2 - { - protected: - PhysicalDeviceProperties2( vk::PhysicalDeviceProperties properties_ = vk::PhysicalDeviceProperties() ) - : properties( properties_ ) - {} - - PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceProperties2; - void* pNext = nullptr; - vk::PhysicalDeviceProperties properties; - }; - static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceProperties2 : public layout::PhysicalDeviceProperties2 - { - operator VkPhysicalDeviceProperties2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProperties2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceProperties2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( properties == rhs.properties ); - } - - bool operator!=( PhysicalDeviceProperties2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceProperties2::sType; - }; - static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
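(Illustrative aside, not part of the patch.) For orientation, the PhysicalDeviceProperties/Properties2 wrappers above back the most basic device query: read the name, API version and limits. A minimal sketch using only core Vulkan 1.1; the function name is illustrative.

// Sketch only; core Vulkan 1.1, no extensions required.
#include <vulkan/vulkan.hpp>
#include <cstdio>

void PrintDeviceInfo(vk::PhysicalDevice physical_device)
{
  vk::PhysicalDeviceProperties2 props2;
  physical_device.getProperties2(&props2);
  const vk::PhysicalDeviceProperties& props = props2.properties;

  std::printf("%s (Vulkan %u.%u.%u), maxImageDimension2D=%u\n", props.deviceName,
              VK_VERSION_MAJOR(props.apiVersion), VK_VERSION_MINOR(props.apiVersion),
              VK_VERSION_PATCH(props.apiVersion), props.limits.maxImageDimension2D);
}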
); - - namespace layout - { - struct PhysicalDeviceProtectedMemoryFeatures - { - protected: - PhysicalDeviceProtectedMemoryFeatures( vk::Bool32 protectedMemory_ = 0 ) - : protectedMemory( protectedMemory_ ) - {} - - PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; - void* pNext = nullptr; - vk::Bool32 protectedMemory; - }; - static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceProtectedMemoryFeatures : public layout::PhysicalDeviceProtectedMemoryFeatures - { - PhysicalDeviceProtectedMemoryFeatures( vk::Bool32 protectedMemory_ = 0 ) - : layout::PhysicalDeviceProtectedMemoryFeatures( protectedMemory_ ) - {} - - PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) - : layout::PhysicalDeviceProtectedMemoryFeatures( rhs ) - {} - - PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( vk::Bool32 protectedMemory_ ) - { - protectedMemory = protectedMemory_; - return *this; - } - - operator VkPhysicalDeviceProtectedMemoryFeatures const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProtectedMemoryFeatures &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( protectedMemory == rhs.protectedMemory ); - } - - bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceProtectedMemoryFeatures::sType; - }; - static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceProtectedMemoryProperties - { - protected: - PhysicalDeviceProtectedMemoryProperties( vk::Bool32 protectedNoFault_ = 0 ) - : protectedNoFault( protectedNoFault_ ) - {} - - PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; - void* pNext = nullptr; - vk::Bool32 protectedNoFault; - }; - static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceProtectedMemoryProperties : public layout::PhysicalDeviceProtectedMemoryProperties - { - operator VkPhysicalDeviceProtectedMemoryProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProtectedMemoryProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( protectedNoFault == rhs.protectedNoFault ); - } - - bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceProtectedMemoryProperties::sType; - }; - static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDevicePushDescriptorPropertiesKHR - { - protected: - PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 ) - : maxPushDescriptors( maxPushDescriptors_ ) - {} - - PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; - void* pNext = nullptr; - uint32_t maxPushDescriptors; - }; - static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDevicePushDescriptorPropertiesKHR : public layout::PhysicalDevicePushDescriptorPropertiesKHR - { - operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevicePushDescriptorPropertiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( maxPushDescriptors == rhs.maxPushDescriptors ); - } - - bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDevicePushDescriptorPropertiesKHR::sType; - }; - static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceRayTracingPropertiesNV - { - protected: - PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = 0, - uint32_t maxRecursionDepth_ = 0, - uint32_t maxShaderGroupStride_ = 0, - uint32_t shaderGroupBaseAlignment_ = 0, - uint64_t maxGeometryCount_ = 0, - uint64_t maxInstanceCount_ = 0, - uint64_t maxTriangleCount_ = 0, - uint32_t maxDescriptorSetAccelerationStructures_ = 0 ) - : shaderGroupHandleSize( shaderGroupHandleSize_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , maxShaderGroupStride( maxShaderGroupStride_ ) - , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) - , maxGeometryCount( maxGeometryCount_ ) - , maxInstanceCount( maxInstanceCount_ ) - , maxTriangleCount( maxTriangleCount_ ) - , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) - {} - - PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; - void* pNext = nullptr; - uint32_t shaderGroupHandleSize; - uint32_t maxRecursionDepth; - uint32_t maxShaderGroupStride; - uint32_t shaderGroupBaseAlignment; - uint64_t maxGeometryCount; - uint64_t maxInstanceCount; - uint64_t maxTriangleCount; - uint32_t maxDescriptorSetAccelerationStructures; - }; - static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceRayTracingPropertiesNV : public layout::PhysicalDeviceRayTracingPropertiesNV - { - operator VkPhysicalDeviceRayTracingPropertiesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceRayTracingPropertiesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) - && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) - && ( maxGeometryCount == rhs.maxGeometryCount ) - && ( maxInstanceCount == rhs.maxInstanceCount ) - && ( maxTriangleCount == rhs.maxTriangleCount ) - && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); - } - - bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceRayTracingPropertiesNV::sType; - }; - static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
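(Illustrative aside, not part of the patch.) PhysicalDeviceRayTracingPropertiesNV above is queried the same way; shaderGroupHandleSize in particular is needed to size shader binding tables. A minimal sketch, assuming VK_NV_ray_tracing support; the helper name is hypothetical.

// Sketch only; assumes VK_NV_ray_tracing support.
#include <vulkan/vulkan.hpp>

uint32_t QueryShaderGroupHandleSize(vk::PhysicalDevice physical_device)
{
  vk::PhysicalDeviceRayTracingPropertiesNV rt_props;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &rt_props;
  physical_device.getProperties2(&props2);
  return rt_props.shaderGroupHandleSize;  // used when laying out the shader binding table
}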
[Elided: the remainder of this removal hunk, which deletes the generated Vulkan-Hpp layout-namespace wrapper structs from PhysicalDeviceRepresentativeFragmentTestFeaturesNV through PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, together with their copy constructors, assignment operators, setters, conversion operators to the corresponding Vk* C structs, comparison operators, and the size and standard-layout static_asserts.]
); - - namespace layout - { - struct PhysicalDeviceTransformFeedbackFeaturesEXT - { - protected: - PhysicalDeviceTransformFeedbackFeaturesEXT( vk::Bool32 transformFeedback_ = 0, - vk::Bool32 geometryStreams_ = 0 ) - : transformFeedback( transformFeedback_ ) - , geometryStreams( geometryStreams_ ) - {} - - PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 transformFeedback; - vk::Bool32 geometryStreams; - }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceTransformFeedbackFeaturesEXT : public layout::PhysicalDeviceTransformFeedbackFeaturesEXT - { - PhysicalDeviceTransformFeedbackFeaturesEXT( vk::Bool32 transformFeedback_ = 0, - vk::Bool32 geometryStreams_ = 0 ) - : layout::PhysicalDeviceTransformFeedbackFeaturesEXT( transformFeedback_, geometryStreams_ ) - {} - - PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) - : layout::PhysicalDeviceTransformFeedbackFeaturesEXT( rhs ) - {} - - PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( vk::Bool32 transformFeedback_ ) - { - transformFeedback = transformFeedback_; - return *this; - } - - PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( vk::Bool32 geometryStreams_ ) - { - geometryStreams = geometryStreams_; - return *this; - } - - operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( transformFeedback == rhs.transformFeedback ) - && ( geometryStreams == rhs.geometryStreams ); - } - - bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceTransformFeedbackFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceTransformFeedbackPropertiesEXT - { - protected: - PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = 0, - uint32_t maxTransformFeedbackBuffers_ = 0, - vk::DeviceSize maxTransformFeedbackBufferSize_ = 0, - uint32_t maxTransformFeedbackStreamDataSize_ = 0, - uint32_t maxTransformFeedbackBufferDataSize_ = 0, - uint32_t maxTransformFeedbackBufferDataStride_ = 0, - vk::Bool32 transformFeedbackQueries_ = 0, - vk::Bool32 transformFeedbackStreamsLinesTriangles_ = 0, - vk::Bool32 transformFeedbackRasterizationStreamSelect_ = 0, - vk::Bool32 transformFeedbackDraw_ = 0 ) - : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) - , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) - , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) - , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) - , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) - , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) - , transformFeedbackQueries( transformFeedbackQueries_ ) - , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) - , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) - , transformFeedbackDraw( transformFeedbackDraw_ ) - {} - - PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; - void* pNext = nullptr; - uint32_t maxTransformFeedbackStreams; - uint32_t maxTransformFeedbackBuffers; - vk::DeviceSize maxTransformFeedbackBufferSize; - uint32_t maxTransformFeedbackStreamDataSize; - uint32_t maxTransformFeedbackBufferDataSize; - uint32_t maxTransformFeedbackBufferDataStride; - vk::Bool32 transformFeedbackQueries; - vk::Bool32 transformFeedbackStreamsLinesTriangles; - vk::Bool32 transformFeedbackRasterizationStreamSelect; - vk::Bool32 transformFeedbackDraw; - }; - static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "layout struct and wrapper have different size!" 
);
-  }
-
-  struct PhysicalDeviceTransformFeedbackPropertiesEXT : public layout::PhysicalDeviceTransformFeedbackPropertiesEXT
-  {
-    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
-    }
-
-    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
-          && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
-          && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
-          && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
-          && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
-          && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
-          && ( transformFeedbackQueries == rhs.transformFeedbackQueries )
-          && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
-          && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
-          && ( transformFeedbackDraw == rhs.transformFeedbackDraw );
-    }
-
-    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PhysicalDeviceTransformFeedbackPropertiesEXT::sType;
-  };
-  static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR
-    {
-    protected:
-      PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( vk::Bool32 uniformBufferStandardLayout_ = 0 )
-        : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
-      {}
-
-      PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>(this) = rhs;
-      }
-
-      PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
-      void* pNext = nullptr;
-      vk::Bool32 uniformBufferStandardLayout;
-    };
-    static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "layout struct and wrapper have different size!"
); - } - - struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR : public layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR - { - PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( vk::Bool32 uniformBufferStandardLayout_ = 0 ) - : layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( uniformBufferStandardLayout_ ) - {} - - PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) - : layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( rhs ) - {} - - PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setUniformBufferStandardLayout( vk::Bool32 uniformBufferStandardLayout_ ) - { - uniformBufferStandardLayout = uniformBufferStandardLayout_; - return *this; - } - - operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); - } - - bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::sType; - }; - static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceVariablePointersFeatures - { - protected: - PhysicalDeviceVariablePointersFeatures( vk::Bool32 variablePointersStorageBuffer_ = 0, - vk::Bool32 variablePointers_ = 0 ) - : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) - {} - - PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; - void* pNext = nullptr; - vk::Bool32 variablePointersStorageBuffer; - vk::Bool32 variablePointers; - }; - static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceVariablePointersFeatures : public layout::PhysicalDeviceVariablePointersFeatures - { - PhysicalDeviceVariablePointersFeatures( vk::Bool32 variablePointersStorageBuffer_ = 0, - vk::Bool32 variablePointers_ = 0 ) - : layout::PhysicalDeviceVariablePointersFeatures( variablePointersStorageBuffer_, variablePointers_ ) - {} - - PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) - : layout::PhysicalDeviceVariablePointersFeatures( rhs ) - {} - - PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( vk::Bool32 variablePointersStorageBuffer_ ) - { - variablePointersStorageBuffer = variablePointersStorageBuffer_; - return *this; - } - - PhysicalDeviceVariablePointersFeatures & setVariablePointers( vk::Bool32 variablePointers_ ) - { - variablePointers = variablePointers_; - return *this; - } - - operator VkPhysicalDeviceVariablePointersFeatures const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceVariablePointersFeatures &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) - && ( variablePointers == rhs.variablePointers ); - } - - bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceVariablePointersFeatures::sType; - }; - static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT - { - protected: - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( vk::Bool32 vertexAttributeInstanceRateDivisor_ = 0, - vk::Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 ) - : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) - , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) - {} - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 vertexAttributeInstanceRateDivisor; - vk::Bool32 vertexAttributeInstanceRateZeroDivisor; - }; - static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "layout struct and wrapper have different size!" 
); - } - - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT : public layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT - { - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( vk::Bool32 vertexAttributeInstanceRateDivisor_ = 0, - vk::Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 ) - : layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT( vertexAttributeInstanceRateDivisor_, vertexAttributeInstanceRateZeroDivisor_ ) - {} - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) - : layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT( rhs ) - {} - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( vk::Bool32 vertexAttributeInstanceRateDivisor_ ) - { - vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; - return *this; - } - - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( vk::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) - { - vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; - return *this; - } - - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) - && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); - } - - bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT - { - protected: - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = 0 ) - : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) - {} - - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; - void* pNext = nullptr; - uint32_t maxVertexAttribDivisor; - }; - static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "layout struct and wrapper have different size!" 
);
-  }
-
-  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT : public layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT
-  {
-    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const
-    {
-      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
-    }
-
-    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &()
-    {
-      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
-    }
-
-    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
-    }
-
-    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT::sType;
-  };
-  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct PhysicalDeviceVulkanMemoryModelFeaturesKHR
-    {
-    protected:
-      PhysicalDeviceVulkanMemoryModelFeaturesKHR( vk::Bool32 vulkanMemoryModel_ = 0,
-                                                  vk::Bool32 vulkanMemoryModelDeviceScope_ = 0,
-                                                  vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = 0 )
-        : vulkanMemoryModel( vulkanMemoryModel_ )
-        , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
-        , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
-      {}
-
-      PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>(this) = rhs;
-      }
-
-      PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs )
-      {
-        *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR;
-      void* pNext = nullptr;
-      vk::Bool32 vulkanMemoryModel;
-      vk::Bool32 vulkanMemoryModelDeviceScope;
-      vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains;
-    };
-    static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "layout struct and wrapper have different size!"
); - } - - struct PhysicalDeviceVulkanMemoryModelFeaturesKHR : public layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR - { - PhysicalDeviceVulkanMemoryModelFeaturesKHR( vk::Bool32 vulkanMemoryModel_ = 0, - vk::Bool32 vulkanMemoryModelDeviceScope_ = 0, - vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = 0 ) - : layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR( vulkanMemoryModel_, vulkanMemoryModelDeviceScope_, vulkanMemoryModelAvailabilityVisibilityChains_ ) - {} - - PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) - : layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR( rhs ) - {} - - PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceVulkanMemoryModelFeaturesKHR & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModel( vk::Bool32 vulkanMemoryModel_ ) - { - vulkanMemoryModel = vulkanMemoryModel_; - return *this; - } - - PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelDeviceScope( vk::Bool32 vulkanMemoryModelDeviceScope_ ) - { - vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; - return *this; - } - - PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelAvailabilityVisibilityChains( vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) - { - vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; - return *this; - } - - operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) - && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) - && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); - } - - bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR::sType; - }; - static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT - { - protected: - PhysicalDeviceYcbcrImageArraysFeaturesEXT( vk::Bool32 ycbcrImageArrays_ = 0 ) - : ycbcrImageArrays( ycbcrImageArrays_ ) - {} - - PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; - void* pNext = nullptr; - vk::Bool32 ycbcrImageArrays; - }; - static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT : public layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT - { - PhysicalDeviceYcbcrImageArraysFeaturesEXT( vk::Bool32 ycbcrImageArrays_ = 0 ) - : layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT( ycbcrImageArrays_ ) - {} - - PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) - : layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT( rhs ) - {} - - PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( vk::Bool32 ycbcrImageArrays_ ) - { - ycbcrImageArrays = ycbcrImageArrays_; - return *this; - } - - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); - } - - bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT::sType; - }; - static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PipelineCacheCreateInfo - { - protected: - PipelineCacheCreateInfo( vk::PipelineCacheCreateFlags flags_ = vk::PipelineCacheCreateFlags(), - size_t initialDataSize_ = 0, - const void* pInitialData_ = nullptr ) - : flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) - {} - - PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineCacheCreateInfo; - const void* pNext = nullptr; - vk::PipelineCacheCreateFlags flags; - size_t initialDataSize; - const void* pInitialData; - }; - static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct PipelineCacheCreateInfo : public layout::PipelineCacheCreateInfo - { - PipelineCacheCreateInfo( vk::PipelineCacheCreateFlags flags_ = vk::PipelineCacheCreateFlags(), - size_t initialDataSize_ = 0, - const void* pInitialData_ = nullptr ) - : layout::PipelineCacheCreateInfo( flags_, initialDataSize_, pInitialData_ ) - {} - - PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) - : layout::PipelineCacheCreateInfo( rhs ) - {} - - PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineCacheCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineCacheCreateInfo & setFlags( vk::PipelineCacheCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) - { - initialDataSize = initialDataSize_; - return *this; - } - - PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) - { - pInitialData = pInitialData_; - return *this; - } - - operator VkPipelineCacheCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineCacheCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineCacheCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); - } - - bool operator!=( PipelineCacheCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineCacheCreateInfo::sType; - }; - static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PipelineColorBlendAdvancedStateCreateInfoEXT - { - protected: - PipelineColorBlendAdvancedStateCreateInfoEXT( vk::Bool32 srcPremultiplied_ = 0, - vk::Bool32 dstPremultiplied_ = 0, - vk::BlendOverlapEXT blendOverlap_ = vk::BlendOverlapEXT::eUncorrelated ) - : srcPremultiplied( srcPremultiplied_ ) - , dstPremultiplied( dstPremultiplied_ ) - , blendOverlap( blendOverlap_ ) - {} - - PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; - const void* pNext = nullptr; - vk::Bool32 srcPremultiplied; - vk::Bool32 dstPremultiplied; - vk::BlendOverlapEXT blendOverlap; - }; - static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct PipelineColorBlendAdvancedStateCreateInfoEXT : public layout::PipelineColorBlendAdvancedStateCreateInfoEXT - { - PipelineColorBlendAdvancedStateCreateInfoEXT( vk::Bool32 srcPremultiplied_ = 0, - vk::Bool32 dstPremultiplied_ = 0, - vk::BlendOverlapEXT blendOverlap_ = vk::BlendOverlapEXT::eUncorrelated ) - : layout::PipelineColorBlendAdvancedStateCreateInfoEXT( srcPremultiplied_, dstPremultiplied_, blendOverlap_ ) - {} - - PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) - : layout::PipelineColorBlendAdvancedStateCreateInfoEXT( rhs ) - {} - - PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( vk::Bool32 srcPremultiplied_ ) - { - srcPremultiplied = srcPremultiplied_; - return *this; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( vk::Bool32 dstPremultiplied_ ) - { - dstPremultiplied = dstPremultiplied_; - return *this; - } - - PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( vk::BlendOverlapEXT blendOverlap_ ) - { - blendOverlap = blendOverlap_; - return *this; - } - - operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcPremultiplied == rhs.srcPremultiplied ) - && ( dstPremultiplied == rhs.dstPremultiplied ) - && ( blendOverlap == rhs.blendOverlap ); - } - - bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineColorBlendAdvancedStateCreateInfoEXT::sType; - }; - static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PipelineCompilerControlCreateInfoAMD - { - protected: - PipelineCompilerControlCreateInfoAMD( vk::PipelineCompilerControlFlagsAMD compilerControlFlags_ = vk::PipelineCompilerControlFlagsAMD() ) - : compilerControlFlags( compilerControlFlags_ ) - {} - - PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; - const void* pNext = nullptr; - vk::PipelineCompilerControlFlagsAMD compilerControlFlags; - }; - static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "layout struct and wrapper have different size!" ); - } - - struct PipelineCompilerControlCreateInfoAMD : public layout::PipelineCompilerControlCreateInfoAMD - { - PipelineCompilerControlCreateInfoAMD( vk::PipelineCompilerControlFlagsAMD compilerControlFlags_ = vk::PipelineCompilerControlFlagsAMD() ) - : layout::PipelineCompilerControlCreateInfoAMD( compilerControlFlags_ ) - {} - - PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) - : layout::PipelineCompilerControlCreateInfoAMD( rhs ) - {} - - PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( vk::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) - { - compilerControlFlags = compilerControlFlags_; - return *this; - } - - operator VkPipelineCompilerControlCreateInfoAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineCompilerControlCreateInfoAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( compilerControlFlags == rhs.compilerControlFlags ); - } - - bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineCompilerControlCreateInfoAMD::sType; - }; - static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PipelineCoverageModulationStateCreateInfoNV - { - protected: - PipelineCoverageModulationStateCreateInfoNV( vk::PipelineCoverageModulationStateCreateFlagsNV flags_ = vk::PipelineCoverageModulationStateCreateFlagsNV(), - vk::CoverageModulationModeNV coverageModulationMode_ = vk::CoverageModulationModeNV::eNone, - vk::Bool32 coverageModulationTableEnable_ = 0, - uint32_t coverageModulationTableCount_ = 0, - const float* pCoverageModulationTable_ = nullptr ) - : flags( flags_ ) - , coverageModulationMode( coverageModulationMode_ ) - , coverageModulationTableEnable( coverageModulationTableEnable_ ) - , coverageModulationTableCount( coverageModulationTableCount_ ) - , pCoverageModulationTable( pCoverageModulationTable_ ) - {} - - PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; - const void* pNext = nullptr; - vk::PipelineCoverageModulationStateCreateFlagsNV flags; - vk::CoverageModulationModeNV coverageModulationMode; - vk::Bool32 coverageModulationTableEnable; - uint32_t coverageModulationTableCount; - const float* pCoverageModulationTable; - }; - static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "layout struct and wrapper have different size!" ); - } - - struct PipelineCoverageModulationStateCreateInfoNV : public layout::PipelineCoverageModulationStateCreateInfoNV - { - PipelineCoverageModulationStateCreateInfoNV( vk::PipelineCoverageModulationStateCreateFlagsNV flags_ = vk::PipelineCoverageModulationStateCreateFlagsNV(), - vk::CoverageModulationModeNV coverageModulationMode_ = vk::CoverageModulationModeNV::eNone, - vk::Bool32 coverageModulationTableEnable_ = 0, - uint32_t coverageModulationTableCount_ = 0, - const float* pCoverageModulationTable_ = nullptr ) - : layout::PipelineCoverageModulationStateCreateInfoNV( flags_, coverageModulationMode_, coverageModulationTableEnable_, coverageModulationTableCount_, pCoverageModulationTable_ ) - {} - - PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) - : layout::PipelineCoverageModulationStateCreateInfoNV( rhs ) - {} - - PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineCoverageModulationStateCreateInfoNV & setFlags( vk::PipelineCoverageModulationStateCreateFlagsNV flags_ ) - { - flags = flags_; - return *this; - } - - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( vk::CoverageModulationModeNV coverageModulationMode_ ) - { - coverageModulationMode = coverageModulationMode_; - return *this; - } - - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( vk::Bool32 coverageModulationTableEnable_ ) - { - coverageModulationTableEnable = coverageModulationTableEnable_; - return *this; - } - - PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) - { 
-      coverageModulationTableCount = coverageModulationTableCount_;
-      return *this;
-    }
-
-    PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ )
-    {
-      pCoverageModulationTable = pCoverageModulationTable_;
-      return *this;
-    }
-
-    operator VkPipelineCoverageModulationStateCreateInfoNV const&() const
-    {
-      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
-    }
-
-    operator VkPipelineCoverageModulationStateCreateInfoNV &()
-    {
-      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
-    }
-
-    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( flags == rhs.flags )
-          && ( coverageModulationMode == rhs.coverageModulationMode )
-          && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
-          && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
-          && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
-    }
-
-    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::PipelineCoverageModulationStateCreateInfoNV::sType;
-  };
-  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct PipelineCoverageReductionStateCreateInfoNV
-    {
-    protected:
-      PipelineCoverageReductionStateCreateInfoNV( vk::PipelineCoverageReductionStateCreateFlagsNV flags_ = vk::PipelineCoverageReductionStateCreateFlagsNV(),
-                                                  vk::CoverageReductionModeNV coverageReductionMode_ = vk::CoverageReductionModeNV::eMerge )
-        : flags( flags_ )
-        , coverageReductionMode( coverageReductionMode_ )
-      {}
-
-      PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs )
-      {
-        *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>(this) = rhs;
-      }
-
-      PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs )
-      {
-        *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
-      const void* pNext = nullptr;
-      vk::PipelineCoverageReductionStateCreateFlagsNV flags;
-      vk::CoverageReductionModeNV coverageReductionMode;
-    };
-    static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "layout struct and wrapper have different size!"
); - } - - struct PipelineCoverageReductionStateCreateInfoNV : public layout::PipelineCoverageReductionStateCreateInfoNV - { - PipelineCoverageReductionStateCreateInfoNV( vk::PipelineCoverageReductionStateCreateFlagsNV flags_ = vk::PipelineCoverageReductionStateCreateFlagsNV(), - vk::CoverageReductionModeNV coverageReductionMode_ = vk::CoverageReductionModeNV::eMerge ) - : layout::PipelineCoverageReductionStateCreateInfoNV( flags_, coverageReductionMode_ ) - {} - - PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) - : layout::PipelineCoverageReductionStateCreateInfoNV( rhs ) - {} - - PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineCoverageReductionStateCreateInfoNV & setFlags( vk::PipelineCoverageReductionStateCreateFlagsNV flags_ ) - { - flags = flags_; - return *this; - } - - PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( vk::CoverageReductionModeNV coverageReductionMode_ ) - { - coverageReductionMode = coverageReductionMode_; - return *this; - } - - operator VkPipelineCoverageReductionStateCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineCoverageReductionStateCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( coverageReductionMode == rhs.coverageReductionMode ); - } - - bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineCoverageReductionStateCreateInfoNV::sType; - }; - static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PipelineCoverageToColorStateCreateInfoNV - { - protected: - PipelineCoverageToColorStateCreateInfoNV( vk::PipelineCoverageToColorStateCreateFlagsNV flags_ = vk::PipelineCoverageToColorStateCreateFlagsNV(), - vk::Bool32 coverageToColorEnable_ = 0, - uint32_t coverageToColorLocation_ = 0 ) - : flags( flags_ ) - , coverageToColorEnable( coverageToColorEnable_ ) - , coverageToColorLocation( coverageToColorLocation_ ) - {} - - PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; - const void* pNext = nullptr; - vk::PipelineCoverageToColorStateCreateFlagsNV flags; - vk::Bool32 coverageToColorEnable; - uint32_t coverageToColorLocation; - }; - static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "layout struct and wrapper have different size!" 
[Deleted hunk (removal lines only): this hunk deletes the Vulkan-Hpp "layout" base structs together with their public wrapper structs. For each type it removes, where present, the defaulted and Vk-struct constructors, assignment from the C struct, the chainable set*() members, the reinterpret_cast conversion operators to and from the matching Vk* C struct, the memberwise operator== / operator!=, and the trailing sizeof and std::is_standard_layout static_asserts. The types covered by this hunk are:
  PipelineCoverageToColorStateCreateInfoNV, PipelineCreationFeedbackEXT,
  PipelineCreationFeedbackCreateInfoEXT, PipelineDiscardRectangleStateCreateInfoEXT,
  PipelineExecutableInfoKHR, PipelineExecutableInternalRepresentationKHR,
  PipelineExecutablePropertiesKHR, PipelineExecutableStatisticValueKHR (union),
  PipelineExecutableStatisticKHR, PipelineInfoKHR, PushConstantRange,
  PipelineLayoutCreateInfo, PipelineRasterizationConservativeStateCreateInfoEXT,
  PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationLineStateCreateInfoEXT,
  PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateStreamCreateInfoEXT,
  PipelineRepresentativeFragmentTestStateCreateInfoNV, PipelineSampleLocationsStateCreateInfoEXT,
  PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, PipelineTessellationDomainOriginStateCreateInfo,
  VertexInputBindingDivisorDescriptionEXT, PipelineVertexInputDivisorStateCreateInfoEXT,
  PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportExclusiveScissorStateCreateInfoNV.]
layout::PipelineViewportExclusiveScissorStateCreateInfoNV::sType; - }; - static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ShadingRatePaletteNV - { - ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = 0, - const vk::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = nullptr ) - : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) - , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) - {} - - ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) - { - shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; - return *this; - } - - ShadingRatePaletteNV & setPShadingRatePaletteEntries( const vk::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) - { - pShadingRatePaletteEntries = pShadingRatePaletteEntries_; - return *this; - } - - operator VkShadingRatePaletteNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkShadingRatePaletteNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ShadingRatePaletteNV const& rhs ) const - { - return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) - && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); - } - - bool operator!=( ShadingRatePaletteNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t shadingRatePaletteEntryCount; - const vk::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries; - }; - static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PipelineViewportShadingRateImageStateCreateInfoNV - { - protected: - PipelineViewportShadingRateImageStateCreateInfoNV( vk::Bool32 shadingRateImageEnable_ = 0, - uint32_t viewportCount_ = 0, - const vk::ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr ) - : shadingRateImageEnable( shadingRateImageEnable_ ) - , viewportCount( viewportCount_ ) - , pShadingRatePalettes( pShadingRatePalettes_ ) - {} - - PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; - const void* pNext = nullptr; - vk::Bool32 shadingRateImageEnable; - uint32_t viewportCount; - const vk::ShadingRatePaletteNV* pShadingRatePalettes; - }; - static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct PipelineViewportShadingRateImageStateCreateInfoNV : public layout::PipelineViewportShadingRateImageStateCreateInfoNV - { - PipelineViewportShadingRateImageStateCreateInfoNV( vk::Bool32 shadingRateImageEnable_ = 0, - uint32_t viewportCount_ = 0, - const vk::ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr ) - : layout::PipelineViewportShadingRateImageStateCreateInfoNV( shadingRateImageEnable_, viewportCount_, pShadingRatePalettes_ ) - {} - - PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) - : layout::PipelineViewportShadingRateImageStateCreateInfoNV( rhs ) - {} - - PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( vk::Bool32 shadingRateImageEnable_ ) - { - shadingRateImageEnable = shadingRateImageEnable_; - return *this; - } - - PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) - { - viewportCount = viewportCount_; - return *this; - } - - PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const vk::ShadingRatePaletteNV* pShadingRatePalettes_ ) - { - pShadingRatePalettes = pShadingRatePalettes_; - return *this; - } - - operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) - && ( viewportCount == rhs.viewportCount ) - && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); - } - - bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineViewportShadingRateImageStateCreateInfoNV::sType; - }; - static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct ViewportSwizzleNV - { - ViewportSwizzleNV( vk::ViewportCoordinateSwizzleNV x_ = vk::ViewportCoordinateSwizzleNV::ePositiveX, - vk::ViewportCoordinateSwizzleNV y_ = vk::ViewportCoordinateSwizzleNV::ePositiveX, - vk::ViewportCoordinateSwizzleNV z_ = vk::ViewportCoordinateSwizzleNV::ePositiveX, - vk::ViewportCoordinateSwizzleNV w_ = vk::ViewportCoordinateSwizzleNV::ePositiveX ) - : x( x_ ) - , y( y_ ) - , z( z_ ) - , w( w_ ) - {} - - ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ViewportSwizzleNV & setX( vk::ViewportCoordinateSwizzleNV x_ ) - { - x = x_; - return *this; - } - - ViewportSwizzleNV & setY( vk::ViewportCoordinateSwizzleNV y_ ) - { - y = y_; - return *this; - } - - ViewportSwizzleNV & setZ( vk::ViewportCoordinateSwizzleNV z_ ) - { - z = z_; - return *this; - } - - ViewportSwizzleNV & setW( vk::ViewportCoordinateSwizzleNV w_ ) - { - w = w_; - return *this; - } - - operator VkViewportSwizzleNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkViewportSwizzleNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ViewportSwizzleNV const& rhs ) const - { - return ( x == rhs.x ) - && ( y == rhs.y ) - && ( z == rhs.z ) - && ( w == rhs.w ); - } - - bool operator!=( ViewportSwizzleNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ViewportCoordinateSwizzleNV x; - vk::ViewportCoordinateSwizzleNV y; - vk::ViewportCoordinateSwizzleNV z; - vk::ViewportCoordinateSwizzleNV w; - }; - static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PipelineViewportSwizzleStateCreateInfoNV - { - protected: - PipelineViewportSwizzleStateCreateInfoNV( vk::PipelineViewportSwizzleStateCreateFlagsNV flags_ = vk::PipelineViewportSwizzleStateCreateFlagsNV(), - uint32_t viewportCount_ = 0, - const vk::ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) - : flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewportSwizzles( pViewportSwizzles_ ) - {} - - PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; - const void* pNext = nullptr; - vk::PipelineViewportSwizzleStateCreateFlagsNV flags; - uint32_t viewportCount; - const vk::ViewportSwizzleNV* pViewportSwizzles; - }; - static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct PipelineViewportSwizzleStateCreateInfoNV : public layout::PipelineViewportSwizzleStateCreateInfoNV - { - PipelineViewportSwizzleStateCreateInfoNV( vk::PipelineViewportSwizzleStateCreateFlagsNV flags_ = vk::PipelineViewportSwizzleStateCreateFlagsNV(), - uint32_t viewportCount_ = 0, - const vk::ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) - : layout::PipelineViewportSwizzleStateCreateInfoNV( flags_, viewportCount_, pViewportSwizzles_ ) - {} - - PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) - : layout::PipelineViewportSwizzleStateCreateInfoNV( rhs ) - {} - - PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineViewportSwizzleStateCreateInfoNV & setFlags( vk::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) - { - flags = flags_; - return *this; - } - - PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) - { - viewportCount = viewportCount_; - return *this; - } - - PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const vk::ViewportSwizzleNV* pViewportSwizzles_ ) - { - pViewportSwizzles = pViewportSwizzles_; - return *this; - } - - operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineViewportSwizzleStateCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( viewportCount == rhs.viewportCount ) - && ( pViewportSwizzles == rhs.pViewportSwizzles ); - } - - bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineViewportSwizzleStateCreateInfoNV::sType; - }; - static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ViewportWScalingNV - { - ViewportWScalingNV( float xcoeff_ = 0, - float ycoeff_ = 0 ) - : xcoeff( xcoeff_ ) - , ycoeff( ycoeff_ ) - {} - - ViewportWScalingNV( VkViewportWScalingNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ViewportWScalingNV & setXcoeff( float xcoeff_ ) - { - xcoeff = xcoeff_; - return *this; - } - - ViewportWScalingNV & setYcoeff( float ycoeff_ ) - { - ycoeff = ycoeff_; - return *this; - } - - operator VkViewportWScalingNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkViewportWScalingNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ViewportWScalingNV const& rhs ) const - { - return ( xcoeff == rhs.xcoeff ) - && ( ycoeff == rhs.ycoeff ); - } - - bool operator!=( ViewportWScalingNV const& rhs ) const - { - return !operator==( rhs ); - } - - public: - float xcoeff; - float ycoeff; - }; - static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PipelineViewportWScalingStateCreateInfoNV - { - protected: - PipelineViewportWScalingStateCreateInfoNV( vk::Bool32 viewportWScalingEnable_ = 0, - uint32_t viewportCount_ = 0, - const vk::ViewportWScalingNV* pViewportWScalings_ = nullptr ) - : viewportWScalingEnable( viewportWScalingEnable_ ) - , viewportCount( viewportCount_ ) - , pViewportWScalings( pViewportWScalings_ ) - {} - - PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; - const void* pNext = nullptr; - vk::Bool32 viewportWScalingEnable; - uint32_t viewportCount; - const vk::ViewportWScalingNV* pViewportWScalings; - }; - static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "layout struct and wrapper have different size!" ); - } - - struct PipelineViewportWScalingStateCreateInfoNV : public layout::PipelineViewportWScalingStateCreateInfoNV - { - PipelineViewportWScalingStateCreateInfoNV( vk::Bool32 viewportWScalingEnable_ = 0, - uint32_t viewportCount_ = 0, - const vk::ViewportWScalingNV* pViewportWScalings_ = nullptr ) - : layout::PipelineViewportWScalingStateCreateInfoNV( viewportWScalingEnable_, viewportCount_, pViewportWScalings_ ) - {} - - PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) - : layout::PipelineViewportWScalingStateCreateInfoNV( rhs ) - {} - - PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( vk::Bool32 viewportWScalingEnable_ ) - { - viewportWScalingEnable = viewportWScalingEnable_; - return *this; - } - - PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) - { - viewportCount = viewportCount_; - return *this; - } - - PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const vk::ViewportWScalingNV* pViewportWScalings_ ) - { - pViewportWScalings = pViewportWScalings_; - return *this; - } - - operator VkPipelineViewportWScalingStateCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPipelineViewportWScalingStateCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) - && ( viewportCount == rhs.viewportCount ) - && ( pViewportWScalings == rhs.pViewportWScalings ); - } - - bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PipelineViewportWScalingStateCreateInfoNV::sType; - }; - static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and 
wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_GGP - - namespace layout - { - struct PresentFrameTokenGGP - { - protected: - PresentFrameTokenGGP( GgpFrameToken frameToken_ = 0 ) - : frameToken( frameToken_ ) - {} - - PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePresentFrameTokenGGP; - const void* pNext = nullptr; - GgpFrameToken frameToken; - }; - static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "layout struct and wrapper have different size!" ); - } - - struct PresentFrameTokenGGP : public layout::PresentFrameTokenGGP - { - PresentFrameTokenGGP( GgpFrameToken frameToken_ = 0 ) - : layout::PresentFrameTokenGGP( frameToken_ ) - {} - - PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) - : layout::PresentFrameTokenGGP( rhs ) - {} - - PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PresentFrameTokenGGP & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) - { - frameToken = frameToken_; - return *this; - } - - operator VkPresentFrameTokenGGP const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPresentFrameTokenGGP &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentFrameTokenGGP const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( frameToken == rhs.frameToken ); - } - - bool operator!=( PresentFrameTokenGGP const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PresentFrameTokenGGP::sType; - }; - static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_GGP*/ - - namespace layout - { - struct PresentInfoKHR - { - protected: - PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, - const vk::Semaphore* pWaitSemaphores_ = nullptr, - uint32_t swapchainCount_ = 0, - const vk::SwapchainKHR* pSwapchains_ = nullptr, - const uint32_t* pImageIndices_ = nullptr, - vk::Result* pResults_ = nullptr ) - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , swapchainCount( swapchainCount_ ) - , pSwapchains( pSwapchains_ ) - , pImageIndices( pImageIndices_ ) - , pResults( pResults_ ) - {} - - PresentInfoKHR( VkPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePresentInfoKHR; - const void* pNext = nullptr; - uint32_t waitSemaphoreCount; - const vk::Semaphore* pWaitSemaphores; - uint32_t swapchainCount; - const vk::SwapchainKHR* pSwapchains; - const uint32_t* pImageIndices; - vk::Result* pResults; - }; - static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct PresentInfoKHR : public layout::PresentInfoKHR - { - PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, - const vk::Semaphore* pWaitSemaphores_ = nullptr, - uint32_t swapchainCount_ = 0, - const vk::SwapchainKHR* pSwapchains_ = nullptr, - const uint32_t* pImageIndices_ = nullptr, - vk::Result* pResults_ = nullptr ) - : layout::PresentInfoKHR( waitSemaphoreCount_, pWaitSemaphores_, swapchainCount_, pSwapchains_, pImageIndices_, pResults_ ) - {} - - PresentInfoKHR( VkPresentInfoKHR const & rhs ) - : layout::PresentInfoKHR( rhs ) - {} - - PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PresentInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - PresentInfoKHR & setPWaitSemaphores( const vk::Semaphore* pWaitSemaphores_ ) - { - pWaitSemaphores = pWaitSemaphores_; - return *this; - } - - PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) - { - swapchainCount = swapchainCount_; - return *this; - } - - PresentInfoKHR & setPSwapchains( const vk::SwapchainKHR* pSwapchains_ ) - { - pSwapchains = pSwapchains_; - return *this; - } - - PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) - { - pImageIndices = pImageIndices_; - return *this; - } - - PresentInfoKHR & setPResults( vk::Result* pResults_ ) - { - pResults = pResults_; - return *this; - } - - operator VkPresentInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPresentInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pSwapchains == rhs.pSwapchains ) - && ( pImageIndices == rhs.pImageIndices ) - && ( pResults == rhs.pResults ); - } - - bool operator!=( PresentInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PresentInfoKHR::sType; - }; - static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct RectLayerKHR - { - RectLayerKHR( vk::Offset2D offset_ = vk::Offset2D(), - vk::Extent2D extent_ = vk::Extent2D(), - uint32_t layer_ = 0 ) - : offset( offset_ ) - , extent( extent_ ) - , layer( layer_ ) - {} - - explicit RectLayerKHR( Rect2D const& rect2D, - uint32_t layer_ = 0 ) - : offset( rect2D.offset ) - , extent( rect2D.extent ) - , layer( layer_ ) - - {} - - RectLayerKHR( VkRectLayerKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RectLayerKHR & setOffset( vk::Offset2D offset_ ) - { - offset = offset_; - return *this; - } - - RectLayerKHR & setExtent( vk::Extent2D extent_ ) - { - extent = extent_; - return *this; - } - - RectLayerKHR & setLayer( uint32_t layer_ ) - { - layer = layer_; - return *this; - } - - operator VkRectLayerKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRectLayerKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RectLayerKHR const& rhs ) const - { - return ( offset == rhs.offset ) - && ( extent == rhs.extent ) - && ( layer == rhs.layer ); - } - - bool operator!=( RectLayerKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Offset2D offset; - vk::Extent2D extent; - uint32_t layer; - }; - static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct PresentRegionKHR - { - PresentRegionKHR( uint32_t rectangleCount_ = 0, - const vk::RectLayerKHR* pRectangles_ = nullptr ) - : rectangleCount( rectangleCount_ ) - , pRectangles( pRectangles_ ) - {} - - PresentRegionKHR( VkPresentRegionKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) - { - rectangleCount = rectangleCount_; - return *this; - } - - PresentRegionKHR & setPRectangles( const vk::RectLayerKHR* pRectangles_ ) - { - pRectangles = pRectangles_; - return *this; - } - - operator VkPresentRegionKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPresentRegionKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentRegionKHR const& rhs ) const - { - return ( rectangleCount == rhs.rectangleCount ) - && ( pRectangles == rhs.pRectangles ); - } - - bool operator!=( PresentRegionKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t rectangleCount; - const vk::RectLayerKHR* pRectangles; - }; - static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct PresentRegionsKHR - { - protected: - PresentRegionsKHR( uint32_t swapchainCount_ = 0, - const vk::PresentRegionKHR* pRegions_ = nullptr ) - : swapchainCount( swapchainCount_ ) - , pRegions( pRegions_ ) - {} - - PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePresentRegionsKHR; - const void* pNext = nullptr; - uint32_t swapchainCount; - const vk::PresentRegionKHR* pRegions; - }; - static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "layout struct and wrapper have different size!" ); - } - - struct PresentRegionsKHR : public layout::PresentRegionsKHR - { - PresentRegionsKHR( uint32_t swapchainCount_ = 0, - const vk::PresentRegionKHR* pRegions_ = nullptr ) - : layout::PresentRegionsKHR( swapchainCount_, pRegions_ ) - {} - - PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) - : layout::PresentRegionsKHR( rhs ) - {} - - PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PresentRegionsKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) - { - swapchainCount = swapchainCount_; - return *this; - } - - PresentRegionsKHR & setPRegions( const vk::PresentRegionKHR* pRegions_ ) - { - pRegions = pRegions_; - return *this; - } - - operator VkPresentRegionsKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPresentRegionsKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentRegionsKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pRegions == rhs.pRegions ); - } - - bool operator!=( PresentRegionsKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PresentRegionsKHR::sType; - }; - static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct PresentTimeGOOGLE - { - PresentTimeGOOGLE( uint32_t presentID_ = 0, - uint64_t desiredPresentTime_ = 0 ) - : presentID( presentID_ ) - , desiredPresentTime( desiredPresentTime_ ) - {} - - PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) - { - presentID = presentID_; - return *this; - } - - PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) - { - desiredPresentTime = desiredPresentTime_; - return *this; - } - - operator VkPresentTimeGOOGLE const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPresentTimeGOOGLE &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentTimeGOOGLE const& rhs ) const - { - return ( presentID == rhs.presentID ) - && ( desiredPresentTime == rhs.desiredPresentTime ); - } - - bool operator!=( PresentTimeGOOGLE const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t presentID; - uint64_t desiredPresentTime; - }; - static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct PresentTimesInfoGOOGLE - { - protected: - PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0, - const vk::PresentTimeGOOGLE* pTimes_ = nullptr ) - : swapchainCount( swapchainCount_ ) - , pTimes( pTimes_ ) - {} - - PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; - const void* pNext = nullptr; - uint32_t swapchainCount; - const vk::PresentTimeGOOGLE* pTimes; - }; - static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "layout struct and wrapper have different size!" 
); - } - - struct PresentTimesInfoGOOGLE : public layout::PresentTimesInfoGOOGLE - { - PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0, - const vk::PresentTimeGOOGLE* pTimes_ = nullptr ) - : layout::PresentTimesInfoGOOGLE( swapchainCount_, pTimes_ ) - {} - - PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) - : layout::PresentTimesInfoGOOGLE( rhs ) - {} - - PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) - { - swapchainCount = swapchainCount_; - return *this; - } - - PresentTimesInfoGOOGLE & setPTimes( const vk::PresentTimeGOOGLE* pTimes_ ) - { - pTimes = pTimes_; - return *this; - } - - operator VkPresentTimesInfoGOOGLE const&() const - { - return *reinterpret_cast( this ); - } - - operator VkPresentTimesInfoGOOGLE &() - { - return *reinterpret_cast( this ); - } - - bool operator==( PresentTimesInfoGOOGLE const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( swapchainCount == rhs.swapchainCount ) - && ( pTimes == rhs.pTimes ); - } - - bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::PresentTimesInfoGOOGLE::sType; - }; - static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ProtectedSubmitInfo - { - protected: - ProtectedSubmitInfo( vk::Bool32 protectedSubmit_ = 0 ) - : protectedSubmit( protectedSubmit_ ) - {} - - ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eProtectedSubmitInfo; - const void* pNext = nullptr; - vk::Bool32 protectedSubmit; - }; - static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "layout struct and wrapper have different size!" 
); - } - - struct ProtectedSubmitInfo : public layout::ProtectedSubmitInfo - { - ProtectedSubmitInfo( vk::Bool32 protectedSubmit_ = 0 ) - : layout::ProtectedSubmitInfo( protectedSubmit_ ) - {} - - ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) - : layout::ProtectedSubmitInfo( rhs ) - {} - - ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ProtectedSubmitInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ProtectedSubmitInfo & setProtectedSubmit( vk::Bool32 protectedSubmit_ ) - { - protectedSubmit = protectedSubmit_; - return *this; - } - - operator VkProtectedSubmitInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkProtectedSubmitInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ProtectedSubmitInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( protectedSubmit == rhs.protectedSubmit ); - } - - bool operator!=( ProtectedSubmitInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ProtectedSubmitInfo::sType; - }; - static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct QueryPoolCreateInfo - { - protected: - QueryPoolCreateInfo( vk::QueryPoolCreateFlags flags_ = vk::QueryPoolCreateFlags(), - vk::QueryType queryType_ = vk::QueryType::eOcclusion, - uint32_t queryCount_ = 0, - vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) - : flags( flags_ ) - , queryType( queryType_ ) - , queryCount( queryCount_ ) - , pipelineStatistics( pipelineStatistics_ ) - {} - - QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eQueryPoolCreateInfo; - const void* pNext = nullptr; - vk::QueryPoolCreateFlags flags; - vk::QueryType queryType; - uint32_t queryCount; - vk::QueryPipelineStatisticFlags pipelineStatistics; - }; - static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct QueryPoolCreateInfo : public layout::QueryPoolCreateInfo - { - QueryPoolCreateInfo( vk::QueryPoolCreateFlags flags_ = vk::QueryPoolCreateFlags(), - vk::QueryType queryType_ = vk::QueryType::eOcclusion, - uint32_t queryCount_ = 0, - vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) - : layout::QueryPoolCreateInfo( flags_, queryType_, queryCount_, pipelineStatistics_ ) - {} - - QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) - : layout::QueryPoolCreateInfo( rhs ) - {} - - QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - QueryPoolCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - QueryPoolCreateInfo & setFlags( vk::QueryPoolCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - QueryPoolCreateInfo & setQueryType( vk::QueryType queryType_ ) - { - queryType = queryType_; - return *this; - } - - QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) - { - queryCount = queryCount_; - return *this; - } - - QueryPoolCreateInfo & setPipelineStatistics( vk::QueryPipelineStatisticFlags pipelineStatistics_ ) - { - pipelineStatistics = pipelineStatistics_; - return *this; - } - - operator VkQueryPoolCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( QueryPoolCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( queryType == rhs.queryType ) - && ( queryCount == rhs.queryCount ) - && ( pipelineStatistics == rhs.pipelineStatistics ); - } - - bool operator!=( QueryPoolCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::QueryPoolCreateInfo::sType; - }; - static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct QueryPoolCreateInfoINTEL - { - protected: - QueryPoolCreateInfoINTEL( vk::QueryPoolSamplingModeINTEL performanceCountersSampling_ = vk::QueryPoolSamplingModeINTEL::eManual ) - : performanceCountersSampling( performanceCountersSampling_ ) - {} - - QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL; - const void* pNext = nullptr; - vk::QueryPoolSamplingModeINTEL performanceCountersSampling; - }; - static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "layout struct and wrapper have different size!" 
); - } - - struct QueryPoolCreateInfoINTEL : public layout::QueryPoolCreateInfoINTEL - { - QueryPoolCreateInfoINTEL( vk::QueryPoolSamplingModeINTEL performanceCountersSampling_ = vk::QueryPoolSamplingModeINTEL::eManual ) - : layout::QueryPoolCreateInfoINTEL( performanceCountersSampling_ ) - {} - - QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) - : layout::QueryPoolCreateInfoINTEL( rhs ) - {} - - QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - QueryPoolCreateInfoINTEL & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( vk::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) - { - performanceCountersSampling = performanceCountersSampling_; - return *this; - } - - operator VkQueryPoolCreateInfoINTEL const&() const - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolCreateInfoINTEL &() - { - return *reinterpret_cast( this ); - } - - bool operator==( QueryPoolCreateInfoINTEL const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( performanceCountersSampling == rhs.performanceCountersSampling ); - } - - bool operator!=( QueryPoolCreateInfoINTEL const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::QueryPoolCreateInfoINTEL::sType; - }; - static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct QueueFamilyCheckpointPropertiesNV - { - protected: - QueueFamilyCheckpointPropertiesNV( vk::PipelineStageFlags checkpointExecutionStageMask_ = vk::PipelineStageFlags() ) - : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) - {} - - QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; - void* pNext = nullptr; - vk::PipelineStageFlags checkpointExecutionStageMask; - }; - static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "layout struct and wrapper have different size!" ); - } - - struct QueueFamilyCheckpointPropertiesNV : public layout::QueueFamilyCheckpointPropertiesNV - { - operator VkQueueFamilyCheckpointPropertiesNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyCheckpointPropertiesNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); - } - - bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::QueueFamilyCheckpointPropertiesNV::sType; - }; - static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct QueueFamilyProperties - { - operator VkQueueFamilyProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( QueueFamilyProperties const& rhs ) const - { - return ( queueFlags == rhs.queueFlags ) - && ( queueCount == rhs.queueCount ) - && ( timestampValidBits == rhs.timestampValidBits ) - && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); - } - - bool operator!=( QueueFamilyProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::QueueFlags queueFlags; - uint32_t queueCount; - uint32_t timestampValidBits; - vk::Extent3D minImageTransferGranularity; - }; - static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct QueueFamilyProperties2 - { - protected: - QueueFamilyProperties2( vk::QueueFamilyProperties queueFamilyProperties_ = vk::QueueFamilyProperties() ) - : queueFamilyProperties( queueFamilyProperties_ ) - {} - - QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eQueueFamilyProperties2; - void* pNext = nullptr; - vk::QueueFamilyProperties queueFamilyProperties; - }; - static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "layout struct and wrapper have different size!" ); - } - - struct QueueFamilyProperties2 : public layout::QueueFamilyProperties2 - { - operator VkQueueFamilyProperties2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkQueueFamilyProperties2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( QueueFamilyProperties2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( queueFamilyProperties == rhs.queueFamilyProperties ); - } - - bool operator!=( QueueFamilyProperties2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::QueueFamilyProperties2::sType; - }; - static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct RayTracingShaderGroupCreateInfoNV - { - protected: - RayTracingShaderGroupCreateInfoNV( vk::RayTracingShaderGroupTypeNV type_ = vk::RayTracingShaderGroupTypeNV::eGeneral, - uint32_t generalShader_ = 0, - uint32_t closestHitShader_ = 0, - uint32_t anyHitShader_ = 0, - uint32_t intersectionShader_ = 0 ) - : type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) - {} - - RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; - const void* pNext = nullptr; - vk::RayTracingShaderGroupTypeNV type; - uint32_t generalShader; - uint32_t closestHitShader; - uint32_t anyHitShader; - uint32_t intersectionShader; - }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "layout struct and wrapper have different size!" ); - } - - struct RayTracingShaderGroupCreateInfoNV : public layout::RayTracingShaderGroupCreateInfoNV - { - RayTracingShaderGroupCreateInfoNV( vk::RayTracingShaderGroupTypeNV type_ = vk::RayTracingShaderGroupTypeNV::eGeneral, - uint32_t generalShader_ = 0, - uint32_t closestHitShader_ = 0, - uint32_t anyHitShader_ = 0, - uint32_t intersectionShader_ = 0 ) - : layout::RayTracingShaderGroupCreateInfoNV( type_, generalShader_, closestHitShader_, anyHitShader_, intersectionShader_ ) - {} - - RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) - : layout::RayTracingShaderGroupCreateInfoNV( rhs ) - {} - - RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setType( vk::RayTracingShaderGroupTypeNV type_ ) - { - type = type_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) - { - generalShader = generalShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) - { - closestHitShader = closestHitShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) - { - anyHitShader = anyHitShader_; - return *this; - } - - RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) - { - intersectionShader = intersectionShader_; - return *this; - } - - operator VkRayTracingShaderGroupCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingShaderGroupCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( type == rhs.type ) - && ( generalShader == rhs.generalShader ) - && ( closestHitShader == rhs.closestHitShader ) - && ( anyHitShader == rhs.anyHitShader ) - && ( intersectionShader == rhs.intersectionShader ); - } - - bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - 
- private: - using layout::RayTracingShaderGroupCreateInfoNV::sType; - }; - static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RayTracingPipelineCreateInfoNV - { - protected: - RayTracingPipelineCreateInfoNV( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(), - uint32_t stageCount_ = 0, - const vk::PipelineShaderStageCreateInfo* pStages_ = nullptr, - uint32_t groupCount_ = 0, - const vk::RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr, - uint32_t maxRecursionDepth_ = 0, - vk::PipelineLayout layout_ = vk::PipelineLayout(), - vk::Pipeline basePipelineHandle_ = vk::Pipeline(), - int32_t basePipelineIndex_ = 0 ) - : flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) - {} - - RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; - const void* pNext = nullptr; - vk::PipelineCreateFlags flags; - uint32_t stageCount; - const vk::PipelineShaderStageCreateInfo* pStages; - uint32_t groupCount; - const vk::RayTracingShaderGroupCreateInfoNV* pGroups; - uint32_t maxRecursionDepth; - vk::PipelineLayout layout; - vk::Pipeline basePipelineHandle; - int32_t basePipelineIndex; - }; - static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "layout struct and wrapper have different size!" 
); - } - - struct RayTracingPipelineCreateInfoNV : public layout::RayTracingPipelineCreateInfoNV - { - RayTracingPipelineCreateInfoNV( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(), - uint32_t stageCount_ = 0, - const vk::PipelineShaderStageCreateInfo* pStages_ = nullptr, - uint32_t groupCount_ = 0, - const vk::RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr, - uint32_t maxRecursionDepth_ = 0, - vk::PipelineLayout layout_ = vk::PipelineLayout(), - vk::Pipeline basePipelineHandle_ = vk::Pipeline(), - int32_t basePipelineIndex_ = 0 ) - : layout::RayTracingPipelineCreateInfoNV( flags_, stageCount_, pStages_, groupCount_, pGroups_, maxRecursionDepth_, layout_, basePipelineHandle_, basePipelineIndex_ ) - {} - - RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) - : layout::RayTracingPipelineCreateInfoNV( rhs ) - {} - - RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setFlags( vk::PipelineCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) - { - stageCount = stageCount_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPStages( const vk::PipelineShaderStageCreateInfo* pStages_ ) - { - pStages = pStages_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) - { - groupCount = groupCount_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setPGroups( const vk::RayTracingShaderGroupCreateInfoNV* pGroups_ ) - { - pGroups = pGroups_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) - { - maxRecursionDepth = maxRecursionDepth_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setLayout( vk::PipelineLayout layout_ ) - { - layout = layout_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setBasePipelineHandle( vk::Pipeline basePipelineHandle_ ) - { - basePipelineHandle = basePipelineHandle_; - return *this; - } - - RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) - { - basePipelineIndex = basePipelineIndex_; - return *this; - } - - operator VkRayTracingPipelineCreateInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRayTracingPipelineCreateInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( stageCount == rhs.stageCount ) - && ( pStages == rhs.pStages ) - && ( groupCount == rhs.groupCount ) - && ( pGroups == rhs.pGroups ) - && ( maxRecursionDepth == rhs.maxRecursionDepth ) - && ( layout == rhs.layout ) - && ( basePipelineHandle == rhs.basePipelineHandle ) - && ( basePipelineIndex == rhs.basePipelineIndex ); - } - - bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RayTracingPipelineCreateInfoNV::sType; - }; - static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct RefreshCycleDurationGOOGLE - { - operator VkRefreshCycleDurationGOOGLE const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRefreshCycleDurationGOOGLE &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const - { - return ( refreshDuration == rhs.refreshDuration ); - } - - bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint64_t refreshDuration; - }; - static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RenderPassAttachmentBeginInfoKHR - { - protected: - RenderPassAttachmentBeginInfoKHR( uint32_t attachmentCount_ = 0, - const vk::ImageView* pAttachments_ = nullptr ) - : attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - {} - - RenderPassAttachmentBeginInfoKHR( VkRenderPassAttachmentBeginInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassAttachmentBeginInfoKHR& operator=( VkRenderPassAttachmentBeginInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassAttachmentBeginInfoKHR; - const void* pNext = nullptr; - uint32_t attachmentCount; - const vk::ImageView* pAttachments; - }; - static_assert( sizeof( RenderPassAttachmentBeginInfoKHR ) == sizeof( VkRenderPassAttachmentBeginInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct RenderPassAttachmentBeginInfoKHR : public layout::RenderPassAttachmentBeginInfoKHR - { - RenderPassAttachmentBeginInfoKHR( uint32_t attachmentCount_ = 0, - const vk::ImageView* pAttachments_ = nullptr ) - : layout::RenderPassAttachmentBeginInfoKHR( attachmentCount_, pAttachments_ ) - {} - - RenderPassAttachmentBeginInfoKHR( VkRenderPassAttachmentBeginInfoKHR const & rhs ) - : layout::RenderPassAttachmentBeginInfoKHR( rhs ) - {} - - RenderPassAttachmentBeginInfoKHR& operator=( VkRenderPassAttachmentBeginInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassAttachmentBeginInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassAttachmentBeginInfoKHR & setAttachmentCount( uint32_t attachmentCount_ ) - { - attachmentCount = attachmentCount_; - return *this; - } - - RenderPassAttachmentBeginInfoKHR & setPAttachments( const vk::ImageView* pAttachments_ ) - { - pAttachments = pAttachments_; - return *this; - } - - operator VkRenderPassAttachmentBeginInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassAttachmentBeginInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassAttachmentBeginInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ); - } - - bool operator!=( RenderPassAttachmentBeginInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassAttachmentBeginInfoKHR::sType; - }; - static_assert( sizeof( RenderPassAttachmentBeginInfoKHR ) == sizeof( VkRenderPassAttachmentBeginInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct RenderPassBeginInfo - { - protected: - RenderPassBeginInfo( vk::RenderPass renderPass_ = vk::RenderPass(), - vk::Framebuffer framebuffer_ = vk::Framebuffer(), - vk::Rect2D renderArea_ = vk::Rect2D(), - uint32_t clearValueCount_ = 0, - const vk::ClearValue* pClearValues_ = nullptr ) - : renderPass( renderPass_ ) - , framebuffer( framebuffer_ ) - , renderArea( renderArea_ ) - , clearValueCount( clearValueCount_ ) - , pClearValues( pClearValues_ ) - {} - - RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassBeginInfo; - const void* pNext = nullptr; - vk::RenderPass renderPass; - vk::Framebuffer framebuffer; - vk::Rect2D renderArea; - uint32_t clearValueCount; - const vk::ClearValue* pClearValues; - }; - static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "layout struct and wrapper have different size!" ); - } - - struct RenderPassBeginInfo : public layout::RenderPassBeginInfo - { - RenderPassBeginInfo( vk::RenderPass renderPass_ = vk::RenderPass(), - vk::Framebuffer framebuffer_ = vk::Framebuffer(), - vk::Rect2D renderArea_ = vk::Rect2D(), - uint32_t clearValueCount_ = 0, - const vk::ClearValue* pClearValues_ = nullptr ) - : layout::RenderPassBeginInfo( renderPass_, framebuffer_, renderArea_, clearValueCount_, pClearValues_ ) - {} - - RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) - : layout::RenderPassBeginInfo( rhs ) - {} - - RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassBeginInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassBeginInfo & setRenderPass( vk::RenderPass renderPass_ ) - { - renderPass = renderPass_; - return *this; - } - - RenderPassBeginInfo & setFramebuffer( vk::Framebuffer framebuffer_ ) - { - framebuffer = framebuffer_; - return *this; - } - - RenderPassBeginInfo & setRenderArea( vk::Rect2D renderArea_ ) - { - renderArea = renderArea_; - return *this; - } - - RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) - { - clearValueCount = clearValueCount_; - return *this; - } - - RenderPassBeginInfo & setPClearValues( const vk::ClearValue* pClearValues_ ) - { - pClearValues = pClearValues_; - return *this; - } - - operator VkRenderPassBeginInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassBeginInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassBeginInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( renderPass == rhs.renderPass ) - && ( framebuffer == rhs.framebuffer ) - && ( renderArea == rhs.renderArea ) - && ( clearValueCount == rhs.clearValueCount ) - && ( pClearValues == rhs.pClearValues ); - } - - bool operator!=( RenderPassBeginInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassBeginInfo::sType; - }; - static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SubpassDescription - { - SubpassDescription( vk::SubpassDescriptionFlags flags_ = vk::SubpassDescriptionFlags(), - vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - uint32_t inputAttachmentCount_ = 0, - const vk::AttachmentReference* pInputAttachments_ = nullptr, - uint32_t colorAttachmentCount_ = 0, - const vk::AttachmentReference* pColorAttachments_ = nullptr, - const vk::AttachmentReference* pResolveAttachments_ = nullptr, - const vk::AttachmentReference* pDepthStencilAttachment_ = nullptr, - uint32_t preserveAttachmentCount_ = 0, - const uint32_t* pPreserveAttachments_ = nullptr ) - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) - {} - - SubpassDescription( VkSubpassDescription const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassDescription& operator=( VkSubpassDescription const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassDescription & setFlags( vk::SubpassDescriptionFlags flags_ ) - { - flags = flags_; - return *this; - } - - SubpassDescription & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) - { - inputAttachmentCount = inputAttachmentCount_; - return *this; - } - - SubpassDescription & setPInputAttachments( const vk::AttachmentReference* pInputAttachments_ ) - { - pInputAttachments = pInputAttachments_; - return *this; - } - - SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) - { - colorAttachmentCount = colorAttachmentCount_; - return *this; - } - - SubpassDescription & setPColorAttachments( const vk::AttachmentReference* pColorAttachments_ ) - { - pColorAttachments = pColorAttachments_; - return *this; - } - - SubpassDescription & setPResolveAttachments( const vk::AttachmentReference* pResolveAttachments_ ) - { - pResolveAttachments = pResolveAttachments_; - return *this; - } - - SubpassDescription & setPDepthStencilAttachment( const vk::AttachmentReference* pDepthStencilAttachment_ ) - { - pDepthStencilAttachment = pDepthStencilAttachment_; - return *this; - } - - SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) - { - preserveAttachmentCount = preserveAttachmentCount_; - return *this; - } - - SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) - { - pPreserveAttachments = pPreserveAttachments_; - return *this; - } - - operator VkSubpassDescription const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDescription &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDescription const& rhs ) const - { - return ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( 
pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); - } - - bool operator!=( SubpassDescription const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::SubpassDescriptionFlags flags; - vk::PipelineBindPoint pipelineBindPoint; - uint32_t inputAttachmentCount; - const vk::AttachmentReference* pInputAttachments; - uint32_t colorAttachmentCount; - const vk::AttachmentReference* pColorAttachments; - const vk::AttachmentReference* pResolveAttachments; - const vk::AttachmentReference* pDepthStencilAttachment; - uint32_t preserveAttachmentCount; - const uint32_t* pPreserveAttachments; - }; - static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SubpassDependency - { - SubpassDependency( uint32_t srcSubpass_ = 0, - uint32_t dstSubpass_ = 0, - vk::PipelineStageFlags srcStageMask_ = vk::PipelineStageFlags(), - vk::PipelineStageFlags dstStageMask_ = vk::PipelineStageFlags(), - vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - vk::DependencyFlags dependencyFlags_ = vk::DependencyFlags() ) - : srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) - {} - - SubpassDependency( VkSubpassDependency const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassDependency& operator=( VkSubpassDependency const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) - { - srcSubpass = srcSubpass_; - return *this; - } - - SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) - { - dstSubpass = dstSubpass_; - return *this; - } - - SubpassDependency & setSrcStageMask( vk::PipelineStageFlags srcStageMask_ ) - { - srcStageMask = srcStageMask_; - return *this; - } - - SubpassDependency & setDstStageMask( vk::PipelineStageFlags dstStageMask_ ) - { - dstStageMask = dstStageMask_; - return *this; - } - - SubpassDependency & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) - { - srcAccessMask = srcAccessMask_; - return *this; - } - - SubpassDependency & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) - { - dstAccessMask = dstAccessMask_; - return *this; - } - - SubpassDependency & setDependencyFlags( vk::DependencyFlags dependencyFlags_ ) - { - dependencyFlags = dependencyFlags_; - return *this; - } - - operator VkSubpassDependency const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDependency &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDependency const& rhs ) const - { - return ( srcSubpass == rhs.srcSubpass ) - && ( dstSubpass == rhs.dstSubpass ) - && ( srcStageMask == rhs.srcStageMask ) - && ( dstStageMask == rhs.dstStageMask ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( dependencyFlags == rhs.dependencyFlags ); - } - - bool operator!=( SubpassDependency const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t srcSubpass; - uint32_t dstSubpass; - vk::PipelineStageFlags srcStageMask; - vk::PipelineStageFlags dstStageMask; - vk::AccessFlags srcAccessMask; 
- vk::AccessFlags dstAccessMask; - vk::DependencyFlags dependencyFlags; - }; - static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RenderPassCreateInfo - { - protected: - RenderPassCreateInfo( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(), - uint32_t attachmentCount_ = 0, - const vk::AttachmentDescription* pAttachments_ = nullptr, - uint32_t subpassCount_ = 0, - const vk::SubpassDescription* pSubpasses_ = nullptr, - uint32_t dependencyCount_ = 0, - const vk::SubpassDependency* pDependencies_ = nullptr ) - : flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) - {} - - RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassCreateInfo; - const void* pNext = nullptr; - vk::RenderPassCreateFlags flags; - uint32_t attachmentCount; - const vk::AttachmentDescription* pAttachments; - uint32_t subpassCount; - const vk::SubpassDescription* pSubpasses; - uint32_t dependencyCount; - const vk::SubpassDependency* pDependencies; - }; - static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct RenderPassCreateInfo : public layout::RenderPassCreateInfo - { - RenderPassCreateInfo( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(), - uint32_t attachmentCount_ = 0, - const vk::AttachmentDescription* pAttachments_ = nullptr, - uint32_t subpassCount_ = 0, - const vk::SubpassDescription* pSubpasses_ = nullptr, - uint32_t dependencyCount_ = 0, - const vk::SubpassDependency* pDependencies_ = nullptr ) - : layout::RenderPassCreateInfo( flags_, attachmentCount_, pAttachments_, subpassCount_, pSubpasses_, dependencyCount_, pDependencies_ ) - {} - - RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) - : layout::RenderPassCreateInfo( rhs ) - {} - - RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassCreateInfo & setFlags( vk::RenderPassCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) - { - attachmentCount = attachmentCount_; - return *this; - } - - RenderPassCreateInfo & setPAttachments( const vk::AttachmentDescription* pAttachments_ ) - { - pAttachments = pAttachments_; - return *this; - } - - RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) - { - subpassCount = subpassCount_; - return *this; - } - - RenderPassCreateInfo & setPSubpasses( const vk::SubpassDescription* pSubpasses_ ) - { - pSubpasses = pSubpasses_; - return *this; - } - - RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) - { - dependencyCount = dependencyCount_; - return *this; - } - - RenderPassCreateInfo & setPDependencies( const vk::SubpassDependency* pDependencies_ ) - { - pDependencies = pDependencies_; - 
return *this; - } - - operator VkRenderPassCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( subpassCount == rhs.subpassCount ) - && ( pSubpasses == rhs.pSubpasses ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pDependencies == rhs.pDependencies ); - } - - bool operator!=( RenderPassCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassCreateInfo::sType; - }; - static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SubpassDescription2KHR - { - protected: - SubpassDescription2KHR( vk::SubpassDescriptionFlags flags_ = vk::SubpassDescriptionFlags(), - vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - uint32_t viewMask_ = 0, - uint32_t inputAttachmentCount_ = 0, - const vk::AttachmentReference2KHR* pInputAttachments_ = nullptr, - uint32_t colorAttachmentCount_ = 0, - const vk::AttachmentReference2KHR* pColorAttachments_ = nullptr, - const vk::AttachmentReference2KHR* pResolveAttachments_ = nullptr, - const vk::AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr, - uint32_t preserveAttachmentCount_ = 0, - const uint32_t* pPreserveAttachments_ = nullptr ) - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , viewMask( viewMask_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) - {} - - SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSubpassDescription2KHR; - const void* pNext = nullptr; - vk::SubpassDescriptionFlags flags; - vk::PipelineBindPoint pipelineBindPoint; - uint32_t viewMask; - uint32_t inputAttachmentCount; - const vk::AttachmentReference2KHR* pInputAttachments; - uint32_t colorAttachmentCount; - const vk::AttachmentReference2KHR* pColorAttachments; - const vk::AttachmentReference2KHR* pResolveAttachments; - const vk::AttachmentReference2KHR* pDepthStencilAttachment; - uint32_t preserveAttachmentCount; - const uint32_t* pPreserveAttachments; - }; - static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "layout struct and wrapper have different size!" 
); - } - - struct SubpassDescription2KHR : public layout::SubpassDescription2KHR - { - SubpassDescription2KHR( vk::SubpassDescriptionFlags flags_ = vk::SubpassDescriptionFlags(), - vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics, - uint32_t viewMask_ = 0, - uint32_t inputAttachmentCount_ = 0, - const vk::AttachmentReference2KHR* pInputAttachments_ = nullptr, - uint32_t colorAttachmentCount_ = 0, - const vk::AttachmentReference2KHR* pColorAttachments_ = nullptr, - const vk::AttachmentReference2KHR* pResolveAttachments_ = nullptr, - const vk::AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr, - uint32_t preserveAttachmentCount_ = 0, - const uint32_t* pPreserveAttachments_ = nullptr ) - : layout::SubpassDescription2KHR( flags_, pipelineBindPoint_, viewMask_, inputAttachmentCount_, pInputAttachments_, colorAttachmentCount_, pColorAttachments_, pResolveAttachments_, pDepthStencilAttachment_, preserveAttachmentCount_, pPreserveAttachments_ ) - {} - - SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) - : layout::SubpassDescription2KHR( rhs ) - {} - - SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassDescription2KHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SubpassDescription2KHR & setFlags( vk::SubpassDescriptionFlags flags_ ) - { - flags = flags_; - return *this; - } - - SubpassDescription2KHR & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - SubpassDescription2KHR & setViewMask( uint32_t viewMask_ ) - { - viewMask = viewMask_; - return *this; - } - - SubpassDescription2KHR & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) - { - inputAttachmentCount = inputAttachmentCount_; - return *this; - } - - SubpassDescription2KHR & setPInputAttachments( const vk::AttachmentReference2KHR* pInputAttachments_ ) - { - pInputAttachments = pInputAttachments_; - return *this; - } - - SubpassDescription2KHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) - { - colorAttachmentCount = colorAttachmentCount_; - return *this; - } - - SubpassDescription2KHR & setPColorAttachments( const vk::AttachmentReference2KHR* pColorAttachments_ ) - { - pColorAttachments = pColorAttachments_; - return *this; - } - - SubpassDescription2KHR & setPResolveAttachments( const vk::AttachmentReference2KHR* pResolveAttachments_ ) - { - pResolveAttachments = pResolveAttachments_; - return *this; - } - - SubpassDescription2KHR & setPDepthStencilAttachment( const vk::AttachmentReference2KHR* pDepthStencilAttachment_ ) - { - pDepthStencilAttachment = pDepthStencilAttachment_; - return *this; - } - - SubpassDescription2KHR & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) - { - preserveAttachmentCount = preserveAttachmentCount_; - return *this; - } - - SubpassDescription2KHR & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) - { - pPreserveAttachments = pPreserveAttachments_; - return *this; - } - - operator VkSubpassDescription2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDescription2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDescription2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( pipelineBindPoint == rhs.pipelineBindPoint ) - && ( viewMask == rhs.viewMask ) - && ( 
inputAttachmentCount == rhs.inputAttachmentCount ) - && ( pInputAttachments == rhs.pInputAttachments ) - && ( colorAttachmentCount == rhs.colorAttachmentCount ) - && ( pColorAttachments == rhs.pColorAttachments ) - && ( pResolveAttachments == rhs.pResolveAttachments ) - && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) - && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) - && ( pPreserveAttachments == rhs.pPreserveAttachments ); - } - - bool operator!=( SubpassDescription2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SubpassDescription2KHR::sType; - }; - static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SubpassDependency2KHR - { - protected: - SubpassDependency2KHR( uint32_t srcSubpass_ = 0, - uint32_t dstSubpass_ = 0, - vk::PipelineStageFlags srcStageMask_ = vk::PipelineStageFlags(), - vk::PipelineStageFlags dstStageMask_ = vk::PipelineStageFlags(), - vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - vk::DependencyFlags dependencyFlags_ = vk::DependencyFlags(), - int32_t viewOffset_ = 0 ) - : srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) - , viewOffset( viewOffset_ ) - {} - - SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSubpassDependency2KHR; - const void* pNext = nullptr; - uint32_t srcSubpass; - uint32_t dstSubpass; - vk::PipelineStageFlags srcStageMask; - vk::PipelineStageFlags dstStageMask; - vk::AccessFlags srcAccessMask; - vk::AccessFlags dstAccessMask; - vk::DependencyFlags dependencyFlags; - int32_t viewOffset; - }; - static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "layout struct and wrapper have different size!" 
); - } - - struct SubpassDependency2KHR : public layout::SubpassDependency2KHR - { - SubpassDependency2KHR( uint32_t srcSubpass_ = 0, - uint32_t dstSubpass_ = 0, - vk::PipelineStageFlags srcStageMask_ = vk::PipelineStageFlags(), - vk::PipelineStageFlags dstStageMask_ = vk::PipelineStageFlags(), - vk::AccessFlags srcAccessMask_ = vk::AccessFlags(), - vk::AccessFlags dstAccessMask_ = vk::AccessFlags(), - vk::DependencyFlags dependencyFlags_ = vk::DependencyFlags(), - int32_t viewOffset_ = 0 ) - : layout::SubpassDependency2KHR( srcSubpass_, dstSubpass_, srcStageMask_, dstStageMask_, srcAccessMask_, dstAccessMask_, dependencyFlags_, viewOffset_ ) - {} - - SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) - : layout::SubpassDependency2KHR( rhs ) - {} - - SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassDependency2KHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SubpassDependency2KHR & setSrcSubpass( uint32_t srcSubpass_ ) - { - srcSubpass = srcSubpass_; - return *this; - } - - SubpassDependency2KHR & setDstSubpass( uint32_t dstSubpass_ ) - { - dstSubpass = dstSubpass_; - return *this; - } - - SubpassDependency2KHR & setSrcStageMask( vk::PipelineStageFlags srcStageMask_ ) - { - srcStageMask = srcStageMask_; - return *this; - } - - SubpassDependency2KHR & setDstStageMask( vk::PipelineStageFlags dstStageMask_ ) - { - dstStageMask = dstStageMask_; - return *this; - } - - SubpassDependency2KHR & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) - { - srcAccessMask = srcAccessMask_; - return *this; - } - - SubpassDependency2KHR & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) - { - dstAccessMask = dstAccessMask_; - return *this; - } - - SubpassDependency2KHR & setDependencyFlags( vk::DependencyFlags dependencyFlags_ ) - { - dependencyFlags = dependencyFlags_; - return *this; - } - - SubpassDependency2KHR & setViewOffset( int32_t viewOffset_ ) - { - viewOffset = viewOffset_; - return *this; - } - - operator VkSubpassDependency2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDependency2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDependency2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( srcSubpass == rhs.srcSubpass ) - && ( dstSubpass == rhs.dstSubpass ) - && ( srcStageMask == rhs.srcStageMask ) - && ( dstStageMask == rhs.dstStageMask ) - && ( srcAccessMask == rhs.srcAccessMask ) - && ( dstAccessMask == rhs.dstAccessMask ) - && ( dependencyFlags == rhs.dependencyFlags ) - && ( viewOffset == rhs.viewOffset ); - } - - bool operator!=( SubpassDependency2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SubpassDependency2KHR::sType; - }; - static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct RenderPassCreateInfo2KHR - { - protected: - RenderPassCreateInfo2KHR( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(), - uint32_t attachmentCount_ = 0, - const vk::AttachmentDescription2KHR* pAttachments_ = nullptr, - uint32_t subpassCount_ = 0, - const vk::SubpassDescription2KHR* pSubpasses_ = nullptr, - uint32_t dependencyCount_ = 0, - const vk::SubpassDependency2KHR* pDependencies_ = nullptr, - uint32_t correlatedViewMaskCount_ = 0, - const uint32_t* pCorrelatedViewMasks_ = nullptr ) - : flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) - , correlatedViewMaskCount( correlatedViewMaskCount_ ) - , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) - {} - - RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassCreateInfo2KHR; - const void* pNext = nullptr; - vk::RenderPassCreateFlags flags; - uint32_t attachmentCount; - const vk::AttachmentDescription2KHR* pAttachments; - uint32_t subpassCount; - const vk::SubpassDescription2KHR* pSubpasses; - uint32_t dependencyCount; - const vk::SubpassDependency2KHR* pDependencies; - uint32_t correlatedViewMaskCount; - const uint32_t* pCorrelatedViewMasks; - }; - static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "layout struct and wrapper have different size!" ); - } - - struct RenderPassCreateInfo2KHR : public layout::RenderPassCreateInfo2KHR - { - RenderPassCreateInfo2KHR( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(), - uint32_t attachmentCount_ = 0, - const vk::AttachmentDescription2KHR* pAttachments_ = nullptr, - uint32_t subpassCount_ = 0, - const vk::SubpassDescription2KHR* pSubpasses_ = nullptr, - uint32_t dependencyCount_ = 0, - const vk::SubpassDependency2KHR* pDependencies_ = nullptr, - uint32_t correlatedViewMaskCount_ = 0, - const uint32_t* pCorrelatedViewMasks_ = nullptr ) - : layout::RenderPassCreateInfo2KHR( flags_, attachmentCount_, pAttachments_, subpassCount_, pSubpasses_, dependencyCount_, pDependencies_, correlatedViewMaskCount_, pCorrelatedViewMasks_ ) - {} - - RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) - : layout::RenderPassCreateInfo2KHR( rhs ) - {} - - RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassCreateInfo2KHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassCreateInfo2KHR & setFlags( vk::RenderPassCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - RenderPassCreateInfo2KHR & setAttachmentCount( uint32_t attachmentCount_ ) - { - attachmentCount = attachmentCount_; - return *this; - } - - RenderPassCreateInfo2KHR & setPAttachments( const vk::AttachmentDescription2KHR* pAttachments_ ) - { - pAttachments = pAttachments_; - return *this; - } - - RenderPassCreateInfo2KHR & setSubpassCount( uint32_t subpassCount_ ) - { - subpassCount = subpassCount_; - return *this; - } - - RenderPassCreateInfo2KHR & setPSubpasses( const vk::SubpassDescription2KHR* pSubpasses_ ) - { - pSubpasses = pSubpasses_; - return *this; - } - - 
RenderPassCreateInfo2KHR & setDependencyCount( uint32_t dependencyCount_ ) - { - dependencyCount = dependencyCount_; - return *this; - } - - RenderPassCreateInfo2KHR & setPDependencies( const vk::SubpassDependency2KHR* pDependencies_ ) - { - pDependencies = pDependencies_; - return *this; - } - - RenderPassCreateInfo2KHR & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) - { - correlatedViewMaskCount = correlatedViewMaskCount_; - return *this; - } - - RenderPassCreateInfo2KHR & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) - { - pCorrelatedViewMasks = pCorrelatedViewMasks_; - return *this; - } - - operator VkRenderPassCreateInfo2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassCreateInfo2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassCreateInfo2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( attachmentCount == rhs.attachmentCount ) - && ( pAttachments == rhs.pAttachments ) - && ( subpassCount == rhs.subpassCount ) - && ( pSubpasses == rhs.pSubpasses ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pDependencies == rhs.pDependencies ) - && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) - && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); - } - - bool operator!=( RenderPassCreateInfo2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassCreateInfo2KHR::sType; - }; - static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RenderPassFragmentDensityMapCreateInfoEXT - { - protected: - RenderPassFragmentDensityMapCreateInfoEXT( vk::AttachmentReference fragmentDensityMapAttachment_ = vk::AttachmentReference() ) - : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) - {} - - RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; - const void* pNext = nullptr; - vk::AttachmentReference fragmentDensityMapAttachment; - }; - static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct RenderPassFragmentDensityMapCreateInfoEXT : public layout::RenderPassFragmentDensityMapCreateInfoEXT - { - RenderPassFragmentDensityMapCreateInfoEXT( vk::AttachmentReference fragmentDensityMapAttachment_ = vk::AttachmentReference() ) - : layout::RenderPassFragmentDensityMapCreateInfoEXT( fragmentDensityMapAttachment_ ) - {} - - RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) - : layout::RenderPassFragmentDensityMapCreateInfoEXT( rhs ) - {} - - RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( vk::AttachmentReference fragmentDensityMapAttachment_ ) - { - fragmentDensityMapAttachment = fragmentDensityMapAttachment_; - return *this; - } - - operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassFragmentDensityMapCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); - } - - bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassFragmentDensityMapCreateInfoEXT::sType; - }; - static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RenderPassInputAttachmentAspectCreateInfo - { - protected: - RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0, - const vk::InputAttachmentAspectReference* pAspectReferences_ = nullptr ) - : aspectReferenceCount( aspectReferenceCount_ ) - , pAspectReferences( pAspectReferences_ ) - {} - - RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; - const void* pNext = nullptr; - uint32_t aspectReferenceCount; - const vk::InputAttachmentAspectReference* pAspectReferences; - }; - static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct RenderPassInputAttachmentAspectCreateInfo : public layout::RenderPassInputAttachmentAspectCreateInfo - { - RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0, - const vk::InputAttachmentAspectReference* pAspectReferences_ = nullptr ) - : layout::RenderPassInputAttachmentAspectCreateInfo( aspectReferenceCount_, pAspectReferences_ ) - {} - - RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) - : layout::RenderPassInputAttachmentAspectCreateInfo( rhs ) - {} - - RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) - { - aspectReferenceCount = aspectReferenceCount_; - return *this; - } - - RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const vk::InputAttachmentAspectReference* pAspectReferences_ ) - { - pAspectReferences = pAspectReferences_; - return *this; - } - - operator VkRenderPassInputAttachmentAspectCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassInputAttachmentAspectCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( aspectReferenceCount == rhs.aspectReferenceCount ) - && ( pAspectReferences == rhs.pAspectReferences ); - } - - bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassInputAttachmentAspectCreateInfo::sType; - }; - static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RenderPassMultiviewCreateInfo - { - protected: - RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0, - const uint32_t* pViewMasks_ = nullptr, - uint32_t dependencyCount_ = 0, - const int32_t* pViewOffsets_ = nullptr, - uint32_t correlationMaskCount_ = 0, - const uint32_t* pCorrelationMasks_ = nullptr ) - : subpassCount( subpassCount_ ) - , pViewMasks( pViewMasks_ ) - , dependencyCount( dependencyCount_ ) - , pViewOffsets( pViewOffsets_ ) - , correlationMaskCount( correlationMaskCount_ ) - , pCorrelationMasks( pCorrelationMasks_ ) - {} - - RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; - const void* pNext = nullptr; - uint32_t subpassCount; - const uint32_t* pViewMasks; - uint32_t dependencyCount; - const int32_t* pViewOffsets; - uint32_t correlationMaskCount; - const uint32_t* pCorrelationMasks; - }; - static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct RenderPassMultiviewCreateInfo : public layout::RenderPassMultiviewCreateInfo - { - RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0, - const uint32_t* pViewMasks_ = nullptr, - uint32_t dependencyCount_ = 0, - const int32_t* pViewOffsets_ = nullptr, - uint32_t correlationMaskCount_ = 0, - const uint32_t* pCorrelationMasks_ = nullptr ) - : layout::RenderPassMultiviewCreateInfo( subpassCount_, pViewMasks_, dependencyCount_, pViewOffsets_, correlationMaskCount_, pCorrelationMasks_ ) - {} - - RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) - : layout::RenderPassMultiviewCreateInfo( rhs ) - {} - - RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) - { - subpassCount = subpassCount_; - return *this; - } - - RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) - { - pViewMasks = pViewMasks_; - return *this; - } - - RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) - { - dependencyCount = dependencyCount_; - return *this; - } - - RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) - { - pViewOffsets = pViewOffsets_; - return *this; - } - - RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) - { - correlationMaskCount = correlationMaskCount_; - return *this; - } - - RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) - { - pCorrelationMasks = pCorrelationMasks_; - return *this; - } - - operator VkRenderPassMultiviewCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassMultiviewCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( subpassCount == rhs.subpassCount ) - && ( pViewMasks == rhs.pViewMasks ) - && ( dependencyCount == rhs.dependencyCount ) - && ( pViewOffsets == rhs.pViewOffsets ) - && ( correlationMaskCount == rhs.correlationMaskCount ) - && ( pCorrelationMasks == rhs.pCorrelationMasks ); - } - - bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassMultiviewCreateInfo::sType; - }; - static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - struct SubpassSampleLocationsEXT - { - SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0, - vk::SampleLocationsInfoEXT sampleLocationsInfo_ = vk::SampleLocationsInfoEXT() ) - : subpassIndex( subpassIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) - {} - - SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) - { - subpassIndex = subpassIndex_; - return *this; - } - - SubpassSampleLocationsEXT & setSampleLocationsInfo( vk::SampleLocationsInfoEXT sampleLocationsInfo_ ) - { - sampleLocationsInfo = sampleLocationsInfo_; - return *this; - } - - operator VkSubpassSampleLocationsEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassSampleLocationsEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassSampleLocationsEXT const& rhs ) const - { - return ( subpassIndex == rhs.subpassIndex ) - && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); - } - - bool operator!=( SubpassSampleLocationsEXT const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t subpassIndex; - vk::SampleLocationsInfoEXT sampleLocationsInfo; - }; - static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct RenderPassSampleLocationsBeginInfoEXT - { - protected: - RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0, - const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr, - uint32_t postSubpassSampleLocationsCount_ = 0, - const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) - : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) - , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) - , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) - , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) - {} - - RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; - const void* pNext = nullptr; - uint32_t attachmentInitialSampleLocationsCount; - const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; - uint32_t postSubpassSampleLocationsCount; - const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations; - }; - static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct RenderPassSampleLocationsBeginInfoEXT : public layout::RenderPassSampleLocationsBeginInfoEXT - { - RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0, - const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr, - uint32_t postSubpassSampleLocationsCount_ = 0, - const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) - : layout::RenderPassSampleLocationsBeginInfoEXT( attachmentInitialSampleLocationsCount_, pAttachmentInitialSampleLocations_, postSubpassSampleLocationsCount_, pPostSubpassSampleLocations_ ) - {} - - RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) - : layout::RenderPassSampleLocationsBeginInfoEXT( rhs ) - {} - - RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) - { - attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; - return *this; - } - - RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) - { - pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; - return *this; - } - - RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) - { - postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; - return *this; - } - - RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) - { - pPostSubpassSampleLocations = pPostSubpassSampleLocations_; - return *this; - } - - operator VkRenderPassSampleLocationsBeginInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkRenderPassSampleLocationsBeginInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) - && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) - && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) - && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); - } - - bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::RenderPassSampleLocationsBeginInfoEXT::sType; - }; - static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct SamplerCreateInfo - { - protected: - SamplerCreateInfo( vk::SamplerCreateFlags flags_ = vk::SamplerCreateFlags(), - vk::Filter magFilter_ = vk::Filter::eNearest, - vk::Filter minFilter_ = vk::Filter::eNearest, - vk::SamplerMipmapMode mipmapMode_ = vk::SamplerMipmapMode::eNearest, - vk::SamplerAddressMode addressModeU_ = vk::SamplerAddressMode::eRepeat, - vk::SamplerAddressMode addressModeV_ = vk::SamplerAddressMode::eRepeat, - vk::SamplerAddressMode addressModeW_ = vk::SamplerAddressMode::eRepeat, - float mipLodBias_ = 0, - vk::Bool32 anisotropyEnable_ = 0, - float maxAnisotropy_ = 0, - vk::Bool32 compareEnable_ = 0, - vk::CompareOp compareOp_ = vk::CompareOp::eNever, - float minLod_ = 0, - float maxLod_ = 0, - vk::BorderColor borderColor_ = vk::BorderColor::eFloatTransparentBlack, - vk::Bool32 unnormalizedCoordinates_ = 0 ) - : flags( flags_ ) - , magFilter( magFilter_ ) - , minFilter( minFilter_ ) - , mipmapMode( mipmapMode_ ) - , addressModeU( addressModeU_ ) - , addressModeV( addressModeV_ ) - , addressModeW( addressModeW_ ) - , mipLodBias( mipLodBias_ ) - , anisotropyEnable( anisotropyEnable_ ) - , maxAnisotropy( maxAnisotropy_ ) - , compareEnable( compareEnable_ ) - , compareOp( compareOp_ ) - , minLod( minLod_ ) - , maxLod( maxLod_ ) - , borderColor( borderColor_ ) - , unnormalizedCoordinates( unnormalizedCoordinates_ ) - {} - - SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSamplerCreateInfo; - const void* pNext = nullptr; - vk::SamplerCreateFlags flags; - vk::Filter magFilter; - vk::Filter minFilter; - vk::SamplerMipmapMode mipmapMode; - vk::SamplerAddressMode addressModeU; - vk::SamplerAddressMode addressModeV; - vk::SamplerAddressMode addressModeW; - float mipLodBias; - vk::Bool32 anisotropyEnable; - float maxAnisotropy; - vk::Bool32 compareEnable; - vk::CompareOp compareOp; - float minLod; - float maxLod; - vk::BorderColor borderColor; - vk::Bool32 unnormalizedCoordinates; - }; - static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct SamplerCreateInfo : public layout::SamplerCreateInfo - { - SamplerCreateInfo( vk::SamplerCreateFlags flags_ = vk::SamplerCreateFlags(), - vk::Filter magFilter_ = vk::Filter::eNearest, - vk::Filter minFilter_ = vk::Filter::eNearest, - vk::SamplerMipmapMode mipmapMode_ = vk::SamplerMipmapMode::eNearest, - vk::SamplerAddressMode addressModeU_ = vk::SamplerAddressMode::eRepeat, - vk::SamplerAddressMode addressModeV_ = vk::SamplerAddressMode::eRepeat, - vk::SamplerAddressMode addressModeW_ = vk::SamplerAddressMode::eRepeat, - float mipLodBias_ = 0, - vk::Bool32 anisotropyEnable_ = 0, - float maxAnisotropy_ = 0, - vk::Bool32 compareEnable_ = 0, - vk::CompareOp compareOp_ = vk::CompareOp::eNever, - float minLod_ = 0, - float maxLod_ = 0, - vk::BorderColor borderColor_ = vk::BorderColor::eFloatTransparentBlack, - vk::Bool32 unnormalizedCoordinates_ = 0 ) - : layout::SamplerCreateInfo( flags_, magFilter_, minFilter_, mipmapMode_, addressModeU_, addressModeV_, addressModeW_, mipLodBias_, anisotropyEnable_, maxAnisotropy_, compareEnable_, compareOp_, minLod_, maxLod_, borderColor_, unnormalizedCoordinates_ ) - {} - - SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) - : layout::SamplerCreateInfo( rhs ) - {} - - SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SamplerCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SamplerCreateInfo & setFlags( vk::SamplerCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - SamplerCreateInfo & setMagFilter( vk::Filter magFilter_ ) - { - magFilter = magFilter_; - return *this; - } - - SamplerCreateInfo & setMinFilter( vk::Filter minFilter_ ) - { - minFilter = minFilter_; - return *this; - } - - SamplerCreateInfo & setMipmapMode( vk::SamplerMipmapMode mipmapMode_ ) - { - mipmapMode = mipmapMode_; - return *this; - } - - SamplerCreateInfo & setAddressModeU( vk::SamplerAddressMode addressModeU_ ) - { - addressModeU = addressModeU_; - return *this; - } - - SamplerCreateInfo & setAddressModeV( vk::SamplerAddressMode addressModeV_ ) - { - addressModeV = addressModeV_; - return *this; - } - - SamplerCreateInfo & setAddressModeW( vk::SamplerAddressMode addressModeW_ ) - { - addressModeW = addressModeW_; - return *this; - } - - SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) - { - mipLodBias = mipLodBias_; - return *this; - } - - SamplerCreateInfo & setAnisotropyEnable( vk::Bool32 anisotropyEnable_ ) - { - anisotropyEnable = anisotropyEnable_; - return *this; - } - - SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) - { - maxAnisotropy = maxAnisotropy_; - return *this; - } - - SamplerCreateInfo & setCompareEnable( vk::Bool32 compareEnable_ ) - { - compareEnable = compareEnable_; - return *this; - } - - SamplerCreateInfo & setCompareOp( vk::CompareOp compareOp_ ) - { - compareOp = compareOp_; - return *this; - } - - SamplerCreateInfo & setMinLod( float minLod_ ) - { - minLod = minLod_; - return *this; - } - - SamplerCreateInfo & setMaxLod( float maxLod_ ) - { - maxLod = maxLod_; - return *this; - } - - SamplerCreateInfo & setBorderColor( vk::BorderColor borderColor_ ) - { - borderColor = borderColor_; - return *this; - } - - SamplerCreateInfo & setUnnormalizedCoordinates( vk::Bool32 unnormalizedCoordinates_ ) - { - unnormalizedCoordinates = unnormalizedCoordinates_; - return *this; - } - - operator VkSamplerCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSamplerCreateInfo &() 
- { - return *reinterpret_cast( this ); - } - - bool operator==( SamplerCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( magFilter == rhs.magFilter ) - && ( minFilter == rhs.minFilter ) - && ( mipmapMode == rhs.mipmapMode ) - && ( addressModeU == rhs.addressModeU ) - && ( addressModeV == rhs.addressModeV ) - && ( addressModeW == rhs.addressModeW ) - && ( mipLodBias == rhs.mipLodBias ) - && ( anisotropyEnable == rhs.anisotropyEnable ) - && ( maxAnisotropy == rhs.maxAnisotropy ) - && ( compareEnable == rhs.compareEnable ) - && ( compareOp == rhs.compareOp ) - && ( minLod == rhs.minLod ) - && ( maxLod == rhs.maxLod ) - && ( borderColor == rhs.borderColor ) - && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); - } - - bool operator!=( SamplerCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SamplerCreateInfo::sType; - }; - static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SamplerReductionModeCreateInfoEXT - { - protected: - SamplerReductionModeCreateInfoEXT( vk::SamplerReductionModeEXT reductionMode_ = vk::SamplerReductionModeEXT::eWeightedAverage ) - : reductionMode( reductionMode_ ) - {} - - SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT; - const void* pNext = nullptr; - vk::SamplerReductionModeEXT reductionMode; - }; - static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct SamplerReductionModeCreateInfoEXT : public layout::SamplerReductionModeCreateInfoEXT - { - SamplerReductionModeCreateInfoEXT( vk::SamplerReductionModeEXT reductionMode_ = vk::SamplerReductionModeEXT::eWeightedAverage ) - : layout::SamplerReductionModeCreateInfoEXT( reductionMode_ ) - {} - - SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) - : layout::SamplerReductionModeCreateInfoEXT( rhs ) - {} - - SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SamplerReductionModeCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SamplerReductionModeCreateInfoEXT & setReductionMode( vk::SamplerReductionModeEXT reductionMode_ ) - { - reductionMode = reductionMode_; - return *this; - } - - operator VkSamplerReductionModeCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSamplerReductionModeCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( reductionMode == rhs.reductionMode ); - } - - bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SamplerReductionModeCreateInfoEXT::sType; - }; - static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SamplerYcbcrConversionCreateInfo - { - protected: - SamplerYcbcrConversionCreateInfo( vk::Format format_ = vk::Format::eUndefined, - vk::SamplerYcbcrModelConversion ycbcrModel_ = vk::SamplerYcbcrModelConversion::eRgbIdentity, - vk::SamplerYcbcrRange ycbcrRange_ = vk::SamplerYcbcrRange::eItuFull, - vk::ComponentMapping components_ = vk::ComponentMapping(), - vk::ChromaLocation xChromaOffset_ = vk::ChromaLocation::eCositedEven, - vk::ChromaLocation yChromaOffset_ = vk::ChromaLocation::eCositedEven, - vk::Filter chromaFilter_ = vk::Filter::eNearest, - vk::Bool32 forceExplicitReconstruction_ = 0 ) - : format( format_ ) - , ycbcrModel( ycbcrModel_ ) - , ycbcrRange( ycbcrRange_ ) - , components( components_ ) - , xChromaOffset( xChromaOffset_ ) - , yChromaOffset( yChromaOffset_ ) - , chromaFilter( chromaFilter_ ) - , forceExplicitReconstruction( forceExplicitReconstruction_ ) - {} - - SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; - const void* pNext = nullptr; - vk::Format format; - vk::SamplerYcbcrModelConversion ycbcrModel; - vk::SamplerYcbcrRange ycbcrRange; - vk::ComponentMapping components; - vk::ChromaLocation xChromaOffset; - vk::ChromaLocation yChromaOffset; - vk::Filter chromaFilter; - vk::Bool32 forceExplicitReconstruction; - }; - static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "layout struct and wrapper have different size!" 
); - } - - struct SamplerYcbcrConversionCreateInfo : public layout::SamplerYcbcrConversionCreateInfo - { - SamplerYcbcrConversionCreateInfo( vk::Format format_ = vk::Format::eUndefined, - vk::SamplerYcbcrModelConversion ycbcrModel_ = vk::SamplerYcbcrModelConversion::eRgbIdentity, - vk::SamplerYcbcrRange ycbcrRange_ = vk::SamplerYcbcrRange::eItuFull, - vk::ComponentMapping components_ = vk::ComponentMapping(), - vk::ChromaLocation xChromaOffset_ = vk::ChromaLocation::eCositedEven, - vk::ChromaLocation yChromaOffset_ = vk::ChromaLocation::eCositedEven, - vk::Filter chromaFilter_ = vk::Filter::eNearest, - vk::Bool32 forceExplicitReconstruction_ = 0 ) - : layout::SamplerYcbcrConversionCreateInfo( format_, ycbcrModel_, ycbcrRange_, components_, xChromaOffset_, yChromaOffset_, chromaFilter_, forceExplicitReconstruction_ ) - {} - - SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) - : layout::SamplerYcbcrConversionCreateInfo( rhs ) - {} - - SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setFormat( vk::Format format_ ) - { - format = format_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setYcbcrModel( vk::SamplerYcbcrModelConversion ycbcrModel_ ) - { - ycbcrModel = ycbcrModel_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setYcbcrRange( vk::SamplerYcbcrRange ycbcrRange_ ) - { - ycbcrRange = ycbcrRange_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setComponents( vk::ComponentMapping components_ ) - { - components = components_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setXChromaOffset( vk::ChromaLocation xChromaOffset_ ) - { - xChromaOffset = xChromaOffset_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setYChromaOffset( vk::ChromaLocation yChromaOffset_ ) - { - yChromaOffset = yChromaOffset_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setChromaFilter( vk::Filter chromaFilter_ ) - { - chromaFilter = chromaFilter_; - return *this; - } - - SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( vk::Bool32 forceExplicitReconstruction_ ) - { - forceExplicitReconstruction = forceExplicitReconstruction_; - return *this; - } - - operator VkSamplerYcbcrConversionCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSamplerYcbcrConversionCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( format == rhs.format ) - && ( ycbcrModel == rhs.ycbcrModel ) - && ( ycbcrRange == rhs.ycbcrRange ) - && ( components == rhs.components ) - && ( xChromaOffset == rhs.xChromaOffset ) - && ( yChromaOffset == rhs.yChromaOffset ) - && ( chromaFilter == rhs.chromaFilter ) - && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); - } - - bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SamplerYcbcrConversionCreateInfo::sType; - }; - static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" 
-  );
-  static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct SamplerYcbcrConversionImageFormatProperties
-    {
-    protected:
-      SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = 0 )
-        : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
-      {}
-
-      SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs )
-      {
-        *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>(this) = rhs;
-      }
-
-      SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs )
-      {
-        *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
-      void* pNext = nullptr;
-      uint32_t combinedImageSamplerDescriptorCount;
-    };
-    static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "layout struct and wrapper have different size!" );
-  }
-
-  struct SamplerYcbcrConversionImageFormatProperties : public layout::SamplerYcbcrConversionImageFormatProperties
-  {
-    operator VkSamplerYcbcrConversionImageFormatProperties const&() const
-    {
-      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
-    }
-
-    operator VkSamplerYcbcrConversionImageFormatProperties &()
-    {
-      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
-    }
-
-    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
-    }
-
-    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::SamplerYcbcrConversionImageFormatProperties::sType;
-  };
-  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
-
-  namespace layout
-  {
-    struct SamplerYcbcrConversionInfo
-    {
-    protected:
-      SamplerYcbcrConversionInfo( vk::SamplerYcbcrConversion conversion_ = vk::SamplerYcbcrConversion() )
-        : conversion( conversion_ )
-      {}
-
-      SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs )
-      {
-        *reinterpret_cast<VkSamplerYcbcrConversionInfo*>(this) = rhs;
-      }
-
-      SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs )
-      {
-        *reinterpret_cast<VkSamplerYcbcrConversionInfo*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
-      const void* pNext = nullptr;
-      vk::SamplerYcbcrConversion conversion;
-    };
-    static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "layout struct and wrapper have different size!"
); - } - - struct SamplerYcbcrConversionInfo : public layout::SamplerYcbcrConversionInfo - { - SamplerYcbcrConversionInfo( vk::SamplerYcbcrConversion conversion_ = vk::SamplerYcbcrConversion() ) - : layout::SamplerYcbcrConversionInfo( conversion_ ) - {} - - SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) - : layout::SamplerYcbcrConversionInfo( rhs ) - {} - - SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SamplerYcbcrConversionInfo & setConversion( vk::SamplerYcbcrConversion conversion_ ) - { - conversion = conversion_; - return *this; - } - - operator VkSamplerYcbcrConversionInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSamplerYcbcrConversionInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SamplerYcbcrConversionInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( conversion == rhs.conversion ); - } - - bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SamplerYcbcrConversionInfo::sType; - }; - static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SemaphoreCreateInfo - { - protected: - SemaphoreCreateInfo( vk::SemaphoreCreateFlags flags_ = vk::SemaphoreCreateFlags() ) - : flags( flags_ ) - {} - - SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSemaphoreCreateInfo; - const void* pNext = nullptr; - vk::SemaphoreCreateFlags flags; - }; - static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct SemaphoreCreateInfo : public layout::SemaphoreCreateInfo - { - SemaphoreCreateInfo( vk::SemaphoreCreateFlags flags_ = vk::SemaphoreCreateFlags() ) - : layout::SemaphoreCreateInfo( flags_ ) - {} - - SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) - : layout::SemaphoreCreateInfo( rhs ) - {} - - SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SemaphoreCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SemaphoreCreateInfo & setFlags( vk::SemaphoreCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - operator VkSemaphoreCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ); - } - - bool operator!=( SemaphoreCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SemaphoreCreateInfo::sType; - }; - static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SemaphoreGetFdInfoKHR - { - protected: - SemaphoreGetFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) - : semaphore( semaphore_ ) - , handleType( handleType_ ) - {} - - SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; - const void* pNext = nullptr; - vk::Semaphore semaphore; - vk::ExternalSemaphoreHandleTypeFlagBits handleType; - }; - static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct SemaphoreGetFdInfoKHR : public layout::SemaphoreGetFdInfoKHR - { - SemaphoreGetFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) - : layout::SemaphoreGetFdInfoKHR( semaphore_, handleType_ ) - {} - - SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) - : layout::SemaphoreGetFdInfoKHR( rhs ) - {} - - SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SemaphoreGetFdInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) - { - semaphore = semaphore_; - return *this; - } - - SemaphoreGetFdInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkSemaphoreGetFdInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreGetFdInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SemaphoreGetFdInfoKHR::sType; - }; - static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct SemaphoreGetWin32HandleInfoKHR - { - protected: - SemaphoreGetWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) - : semaphore( semaphore_ ) - , handleType( handleType_ ) - {} - - SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - const void* pNext = nullptr; - vk::Semaphore semaphore; - vk::ExternalSemaphoreHandleTypeFlagBits handleType; - }; - static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct SemaphoreGetWin32HandleInfoKHR : public layout::SemaphoreGetWin32HandleInfoKHR - { - SemaphoreGetWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(), - vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) - : layout::SemaphoreGetWin32HandleInfoKHR( semaphore_, handleType_ ) - {} - - SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) - : layout::SemaphoreGetWin32HandleInfoKHR( rhs ) - {} - - SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) - { - semaphore = semaphore_; - return *this; - } - - SemaphoreGetWin32HandleInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) - { - handleType = handleType_; - return *this; - } - - operator VkSemaphoreGetWin32HandleInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSemaphoreGetWin32HandleInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( semaphore == rhs.semaphore ) - && ( handleType == rhs.handleType ); - } - - bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SemaphoreGetWin32HandleInfoKHR::sType; - }; - static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct ShaderModuleCreateInfo - { - protected: - ShaderModuleCreateInfo( vk::ShaderModuleCreateFlags flags_ = vk::ShaderModuleCreateFlags(), - size_t codeSize_ = 0, - const uint32_t* pCode_ = nullptr ) - : flags( flags_ ) - , codeSize( codeSize_ ) - , pCode( pCode_ ) - {} - - ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eShaderModuleCreateInfo; - const void* pNext = nullptr; - vk::ShaderModuleCreateFlags flags; - size_t codeSize; - const uint32_t* pCode; - }; - static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "layout struct and wrapper have different size!" ); - } - - struct ShaderModuleCreateInfo : public layout::ShaderModuleCreateInfo - { - ShaderModuleCreateInfo( vk::ShaderModuleCreateFlags flags_ = vk::ShaderModuleCreateFlags(), - size_t codeSize_ = 0, - const uint32_t* pCode_ = nullptr ) - : layout::ShaderModuleCreateInfo( flags_, codeSize_, pCode_ ) - {} - - ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) - : layout::ShaderModuleCreateInfo( rhs ) - {} - - ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ShaderModuleCreateInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ShaderModuleCreateInfo & setFlags( vk::ShaderModuleCreateFlags flags_ ) - { - flags = flags_; - return *this; - } - - ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) - { - codeSize = codeSize_; - return *this; - } - - ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) - { - pCode = pCode_; - return *this; - } - - operator VkShaderModuleCreateInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkShaderModuleCreateInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ShaderModuleCreateInfo const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( codeSize == rhs.codeSize ) - && ( pCode == rhs.pCode ); - } - - bool operator!=( ShaderModuleCreateInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ShaderModuleCreateInfo::sType; - }; - static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
-  );
-
-  namespace layout
-  {
-    struct ShaderModuleValidationCacheCreateInfoEXT
-    {
-    protected:
-      ShaderModuleValidationCacheCreateInfoEXT( vk::ValidationCacheEXT validationCache_ = vk::ValidationCacheEXT() )
-        : validationCache( validationCache_ )
-      {}
-
-      ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
-      {
-        *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>(this) = rhs;
-      }
-
-      ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
-      {
-        *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>(this) = rhs;
-        return *this;
-      }
-
-    public:
-      vk::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
-      const void* pNext = nullptr;
-      vk::ValidationCacheEXT validationCache;
-    };
-    static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "layout struct and wrapper have different size!" );
-  }
-
-  struct ShaderModuleValidationCacheCreateInfoEXT : public layout::ShaderModuleValidationCacheCreateInfoEXT
-  {
-    ShaderModuleValidationCacheCreateInfoEXT( vk::ValidationCacheEXT validationCache_ = vk::ValidationCacheEXT() )
-      : layout::ShaderModuleValidationCacheCreateInfoEXT( validationCache_ )
-    {}
-
-    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
-      : layout::ShaderModuleValidationCacheCreateInfoEXT( rhs )
-    {}
-
-    ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
-    {
-      *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>(this) = rhs;
-      return *this;
-    }
-
-    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ )
-    {
-      pNext = pNext_;
-      return *this;
-    }
-
-    ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( vk::ValidationCacheEXT validationCache_ )
-    {
-      validationCache = validationCache_;
-      return *this;
-    }
-
-    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const
-    {
-      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
-    }
-
-    operator VkShaderModuleValidationCacheCreateInfoEXT &()
-    {
-      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
-    }
-
-    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const
-    {
-      return ( sType == rhs.sType )
-          && ( pNext == rhs.pNext )
-          && ( validationCache == rhs.validationCache );
-    }
-
-    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const
-    {
-      return !operator==( rhs );
-    }
-
-  private:
-    using layout::ShaderModuleValidationCacheCreateInfoEXT::sType;
-  };
-  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
-  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!"
); - - struct ShaderResourceUsageAMD - { - operator VkShaderResourceUsageAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkShaderResourceUsageAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ShaderResourceUsageAMD const& rhs ) const - { - return ( numUsedVgprs == rhs.numUsedVgprs ) - && ( numUsedSgprs == rhs.numUsedSgprs ) - && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) - && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) - && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); - } - - bool operator!=( ShaderResourceUsageAMD const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t numUsedVgprs; - uint32_t numUsedSgprs; - uint32_t ldsSizePerLocalWorkGroup; - size_t ldsUsageSizeInBytes; - size_t scratchMemUsageInBytes; - }; - static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct ShaderStatisticsInfoAMD - { - operator VkShaderStatisticsInfoAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkShaderStatisticsInfoAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ShaderStatisticsInfoAMD const& rhs ) const - { - return ( shaderStageMask == rhs.shaderStageMask ) - && ( resourceUsage == rhs.resourceUsage ) - && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) - && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) - && ( numAvailableVgprs == rhs.numAvailableVgprs ) - && ( numAvailableSgprs == rhs.numAvailableSgprs ) - && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ); - } - - bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ShaderStageFlags shaderStageMask; - vk::ShaderResourceUsageAMD resourceUsage; - uint32_t numPhysicalVgprs; - uint32_t numPhysicalSgprs; - uint32_t numAvailableVgprs; - uint32_t numAvailableSgprs; - uint32_t computeWorkGroupSize[3]; - }; - static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SharedPresentSurfaceCapabilitiesKHR - { - protected: - SharedPresentSurfaceCapabilitiesKHR( vk::ImageUsageFlags sharedPresentSupportedUsageFlags_ = vk::ImageUsageFlags() ) - : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) - {} - - SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; - void* pNext = nullptr; - vk::ImageUsageFlags sharedPresentSupportedUsageFlags; - }; - static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "layout struct and wrapper have different size!" 
); - } - - struct SharedPresentSurfaceCapabilitiesKHR : public layout::SharedPresentSurfaceCapabilitiesKHR - { - operator VkSharedPresentSurfaceCapabilitiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSharedPresentSurfaceCapabilitiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); - } - - bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SharedPresentSurfaceCapabilitiesKHR::sType; - }; - static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseImageFormatProperties - { - operator VkSparseImageFormatProperties const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageFormatProperties &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageFormatProperties const& rhs ) const - { - return ( aspectMask == rhs.aspectMask ) - && ( imageGranularity == rhs.imageGranularity ) - && ( flags == rhs.flags ); - } - - bool operator!=( SparseImageFormatProperties const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::ImageAspectFlags aspectMask; - vk::Extent3D imageGranularity; - vk::SparseImageFormatFlags flags; - }; - static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SparseImageFormatProperties2 - { - protected: - SparseImageFormatProperties2( vk::SparseImageFormatProperties properties_ = vk::SparseImageFormatProperties() ) - : properties( properties_ ) - {} - - SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSparseImageFormatProperties2; - void* pNext = nullptr; - vk::SparseImageFormatProperties properties; - }; - static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "layout struct and wrapper have different size!" ); - } - - struct SparseImageFormatProperties2 : public layout::SparseImageFormatProperties2 - { - operator VkSparseImageFormatProperties2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageFormatProperties2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageFormatProperties2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( properties == rhs.properties ); - } - - bool operator!=( SparseImageFormatProperties2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SparseImageFormatProperties2::sType; - }; - static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SparseImageMemoryRequirements - { - operator VkSparseImageMemoryRequirements const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageMemoryRequirements &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageMemoryRequirements const& rhs ) const - { - return ( formatProperties == rhs.formatProperties ) - && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) - && ( imageMipTailSize == rhs.imageMipTailSize ) - && ( imageMipTailOffset == rhs.imageMipTailOffset ) - && ( imageMipTailStride == rhs.imageMipTailStride ); - } - - bool operator!=( SparseImageMemoryRequirements const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::SparseImageFormatProperties formatProperties; - uint32_t imageMipTailFirstLod; - vk::DeviceSize imageMipTailSize; - vk::DeviceSize imageMipTailOffset; - vk::DeviceSize imageMipTailStride; - }; - static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SparseImageMemoryRequirements2 - { - protected: - SparseImageMemoryRequirements2( vk::SparseImageMemoryRequirements memoryRequirements_ = vk::SparseImageMemoryRequirements() ) - : memoryRequirements( memoryRequirements_ ) - {} - - SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSparseImageMemoryRequirements2; - void* pNext = nullptr; - vk::SparseImageMemoryRequirements memoryRequirements; - }; - static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "layout struct and wrapper have different size!" ); - } - - struct SparseImageMemoryRequirements2 : public layout::SparseImageMemoryRequirements2 - { - operator VkSparseImageMemoryRequirements2 const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSparseImageMemoryRequirements2 &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SparseImageMemoryRequirements2 const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( memoryRequirements == rhs.memoryRequirements ); - } - - bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SparseImageMemoryRequirements2::sType; - }; - static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_GGP - - namespace layout - { - struct StreamDescriptorSurfaceCreateInfoGGP - { - protected: - StreamDescriptorSurfaceCreateInfoGGP( vk::StreamDescriptorSurfaceCreateFlagsGGP flags_ = vk::StreamDescriptorSurfaceCreateFlagsGGP(), - GgpStreamDescriptor streamDescriptor_ = 0 ) - : flags( flags_ ) - , streamDescriptor( streamDescriptor_ ) - {} - - StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; - const void* pNext = nullptr; - vk::StreamDescriptorSurfaceCreateFlagsGGP flags; - GgpStreamDescriptor streamDescriptor; - }; - static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "layout struct and wrapper have different size!" ); - } - - struct StreamDescriptorSurfaceCreateInfoGGP : public layout::StreamDescriptorSurfaceCreateInfoGGP - { - StreamDescriptorSurfaceCreateInfoGGP( vk::StreamDescriptorSurfaceCreateFlagsGGP flags_ = vk::StreamDescriptorSurfaceCreateFlagsGGP(), - GgpStreamDescriptor streamDescriptor_ = 0 ) - : layout::StreamDescriptorSurfaceCreateInfoGGP( flags_, streamDescriptor_ ) - {} - - StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) - : layout::StreamDescriptorSurfaceCreateInfoGGP( rhs ) - {} - - StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - StreamDescriptorSurfaceCreateInfoGGP & setFlags( vk::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) - { - flags = flags_; - return *this; - } - - StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) - { - streamDescriptor = streamDescriptor_; - return *this; - } - - operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const - { - return *reinterpret_cast( this ); - } - - operator VkStreamDescriptorSurfaceCreateInfoGGP &() - { - return *reinterpret_cast( this ); - } - - bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( streamDescriptor == rhs.streamDescriptor ); - } - - bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::StreamDescriptorSurfaceCreateInfoGGP::sType; - }; - static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_GGP*/ - - namespace layout - { - struct SubmitInfo - { - protected: - SubmitInfo( uint32_t waitSemaphoreCount_ = 0, - const vk::Semaphore* pWaitSemaphores_ = nullptr, - const vk::PipelineStageFlags* pWaitDstStageMask_ = nullptr, - uint32_t commandBufferCount_ = 0, - const vk::CommandBuffer* pCommandBuffers_ = nullptr, - uint32_t signalSemaphoreCount_ = 0, - const vk::Semaphore* pSignalSemaphores_ = nullptr ) - : waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , pWaitDstStageMask( pWaitDstStageMask_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBuffers( pCommandBuffers_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) - {} - - SubmitInfo( VkSubmitInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubmitInfo& operator=( VkSubmitInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSubmitInfo; - const void* pNext = nullptr; - uint32_t waitSemaphoreCount; - const vk::Semaphore* pWaitSemaphores; - const vk::PipelineStageFlags* pWaitDstStageMask; - uint32_t commandBufferCount; - const vk::CommandBuffer* pCommandBuffers; - uint32_t signalSemaphoreCount; - const vk::Semaphore* pSignalSemaphores; - }; - static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "layout struct and wrapper have different size!" ); - } - - struct SubmitInfo : public layout::SubmitInfo - { - SubmitInfo( uint32_t waitSemaphoreCount_ = 0, - const vk::Semaphore* pWaitSemaphores_ = nullptr, - const vk::PipelineStageFlags* pWaitDstStageMask_ = nullptr, - uint32_t commandBufferCount_ = 0, - const vk::CommandBuffer* pCommandBuffers_ = nullptr, - uint32_t signalSemaphoreCount_ = 0, - const vk::Semaphore* pSignalSemaphores_ = nullptr ) - : layout::SubmitInfo( waitSemaphoreCount_, pWaitSemaphores_, pWaitDstStageMask_, commandBufferCount_, pCommandBuffers_, signalSemaphoreCount_, pSignalSemaphores_ ) - {} - - SubmitInfo( VkSubmitInfo const & rhs ) - : layout::SubmitInfo( rhs ) - {} - - SubmitInfo& operator=( VkSubmitInfo const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubmitInfo & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) - { - waitSemaphoreCount = waitSemaphoreCount_; - return *this; - } - - SubmitInfo & setPWaitSemaphores( const vk::Semaphore* pWaitSemaphores_ ) - { - pWaitSemaphores = pWaitSemaphores_; - return *this; - } - - SubmitInfo & setPWaitDstStageMask( const vk::PipelineStageFlags* pWaitDstStageMask_ ) - { - pWaitDstStageMask = pWaitDstStageMask_; - return *this; - } - - SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) - { - commandBufferCount = commandBufferCount_; - return *this; - } - - SubmitInfo & setPCommandBuffers( const vk::CommandBuffer* pCommandBuffers_ ) - { - pCommandBuffers = pCommandBuffers_; - return *this; - } - - SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) - { - signalSemaphoreCount = signalSemaphoreCount_; - return *this; - } - - SubmitInfo & setPSignalSemaphores( const vk::Semaphore* pSignalSemaphores_ ) - { - pSignalSemaphores = pSignalSemaphores_; - return *this; - } - - operator VkSubmitInfo const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubmitInfo &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubmitInfo const& rhs ) const - { - return ( sType == rhs.sType ) - 
&& ( pNext == rhs.pNext ) - && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) - && ( pWaitSemaphores == rhs.pWaitSemaphores ) - && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) - && ( commandBufferCount == rhs.commandBufferCount ) - && ( pCommandBuffers == rhs.pCommandBuffers ) - && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) - && ( pSignalSemaphores == rhs.pSignalSemaphores ); - } - - bool operator!=( SubmitInfo const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SubmitInfo::sType; - }; - static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SubpassBeginInfoKHR - { - protected: - SubpassBeginInfoKHR( vk::SubpassContents contents_ = vk::SubpassContents::eInline ) - : contents( contents_ ) - {} - - SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSubpassBeginInfoKHR; - const void* pNext = nullptr; - vk::SubpassContents contents; - }; - static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct SubpassBeginInfoKHR : public layout::SubpassBeginInfoKHR - { - SubpassBeginInfoKHR( vk::SubpassContents contents_ = vk::SubpassContents::eInline ) - : layout::SubpassBeginInfoKHR( contents_ ) - {} - - SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) - : layout::SubpassBeginInfoKHR( rhs ) - {} - - SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassBeginInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SubpassBeginInfoKHR & setContents( vk::SubpassContents contents_ ) - { - contents = contents_; - return *this; - } - - operator VkSubpassBeginInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassBeginInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassBeginInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( contents == rhs.contents ); - } - - bool operator!=( SubpassBeginInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SubpassBeginInfoKHR::sType; - }; - static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct SubpassDescriptionDepthStencilResolveKHR - { - protected: - SubpassDescriptionDepthStencilResolveKHR( vk::ResolveModeFlagBitsKHR depthResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone, - vk::ResolveModeFlagBitsKHR stencilResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone, - const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment_ = nullptr ) - : depthResolveMode( depthResolveMode_ ) - , stencilResolveMode( stencilResolveMode_ ) - , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) - {} - - SubpassDescriptionDepthStencilResolveKHR( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassDescriptionDepthStencilResolveKHR& operator=( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolveKHR; - const void* pNext = nullptr; - vk::ResolveModeFlagBitsKHR depthResolveMode; - vk::ResolveModeFlagBitsKHR stencilResolveMode; - const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment; - }; - static_assert( sizeof( SubpassDescriptionDepthStencilResolveKHR ) == sizeof( VkSubpassDescriptionDepthStencilResolveKHR ), "layout struct and wrapper have different size!" ); - } - - struct SubpassDescriptionDepthStencilResolveKHR : public layout::SubpassDescriptionDepthStencilResolveKHR - { - SubpassDescriptionDepthStencilResolveKHR( vk::ResolveModeFlagBitsKHR depthResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone, - vk::ResolveModeFlagBitsKHR stencilResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone, - const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment_ = nullptr ) - : layout::SubpassDescriptionDepthStencilResolveKHR( depthResolveMode_, stencilResolveMode_, pDepthStencilResolveAttachment_ ) - {} - - SubpassDescriptionDepthStencilResolveKHR( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) - : layout::SubpassDescriptionDepthStencilResolveKHR( rhs ) - {} - - SubpassDescriptionDepthStencilResolveKHR& operator=( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassDescriptionDepthStencilResolveKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SubpassDescriptionDepthStencilResolveKHR & setDepthResolveMode( vk::ResolveModeFlagBitsKHR depthResolveMode_ ) - { - depthResolveMode = depthResolveMode_; - return *this; - } - - SubpassDescriptionDepthStencilResolveKHR & setStencilResolveMode( vk::ResolveModeFlagBitsKHR stencilResolveMode_ ) - { - stencilResolveMode = stencilResolveMode_; - return *this; - } - - SubpassDescriptionDepthStencilResolveKHR & setPDepthStencilResolveAttachment( const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment_ ) - { - pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; - return *this; - } - - operator VkSubpassDescriptionDepthStencilResolveKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassDescriptionDepthStencilResolveKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( depthResolveMode == rhs.depthResolveMode ) - && ( stencilResolveMode == rhs.stencilResolveMode ) - && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); - } - - bool operator!=( 
SubpassDescriptionDepthStencilResolveKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SubpassDescriptionDepthStencilResolveKHR::sType; - }; - static_assert( sizeof( SubpassDescriptionDepthStencilResolveKHR ) == sizeof( VkSubpassDescriptionDepthStencilResolveKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SubpassEndInfoKHR - { - protected: - SubpassEndInfoKHR() - - {} - - SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSubpassEndInfoKHR; - const void* pNext = nullptr; - }; - static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct SubpassEndInfoKHR : public layout::SubpassEndInfoKHR - { - SubpassEndInfoKHR() - - : layout::SubpassEndInfoKHR( ) - {} - - SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) - : layout::SubpassEndInfoKHR( rhs ) - {} - - SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SubpassEndInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - operator VkSubpassEndInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSubpassEndInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SubpassEndInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ); - } - - bool operator!=( SubpassEndInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SubpassEndInfoKHR::sType; - }; - static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct SurfaceCapabilities2EXT - { - protected: - SurfaceCapabilities2EXT( uint32_t minImageCount_ = 0, - uint32_t maxImageCount_ = 0, - vk::Extent2D currentExtent_ = vk::Extent2D(), - vk::Extent2D minImageExtent_ = vk::Extent2D(), - vk::Extent2D maxImageExtent_ = vk::Extent2D(), - uint32_t maxImageArrayLayers_ = 0, - vk::SurfaceTransformFlagsKHR supportedTransforms_ = vk::SurfaceTransformFlagsKHR(), - vk::SurfaceTransformFlagBitsKHR currentTransform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity, - vk::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = vk::CompositeAlphaFlagsKHR(), - vk::ImageUsageFlags supportedUsageFlags_ = vk::ImageUsageFlags(), - vk::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = vk::SurfaceCounterFlagsEXT() ) - : minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) - , supportedSurfaceCounters( supportedSurfaceCounters_ ) - {} - - SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceCapabilities2EXT; - void* pNext = nullptr; - uint32_t minImageCount; - uint32_t maxImageCount; - vk::Extent2D currentExtent; - vk::Extent2D minImageExtent; - vk::Extent2D maxImageExtent; - uint32_t maxImageArrayLayers; - vk::SurfaceTransformFlagsKHR supportedTransforms; - vk::SurfaceTransformFlagBitsKHR currentTransform; - vk::CompositeAlphaFlagsKHR supportedCompositeAlpha; - vk::ImageUsageFlags supportedUsageFlags; - vk::SurfaceCounterFlagsEXT supportedSurfaceCounters; - }; - static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "layout struct and wrapper have different size!" ); - } - - struct SurfaceCapabilities2EXT : public layout::SurfaceCapabilities2EXT - { - operator VkSurfaceCapabilities2EXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilities2EXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilities2EXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( minImageCount == rhs.minImageCount ) - && ( maxImageCount == rhs.maxImageCount ) - && ( currentExtent == rhs.currentExtent ) - && ( minImageExtent == rhs.minImageExtent ) - && ( maxImageExtent == rhs.maxImageExtent ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( currentTransform == rhs.currentTransform ) - && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) - && ( supportedUsageFlags == rhs.supportedUsageFlags ) - && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); - } - - bool operator!=( SurfaceCapabilities2EXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceCapabilities2EXT::sType; - }; - static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - struct SurfaceCapabilitiesKHR - { - operator VkSurfaceCapabilitiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilitiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilitiesKHR const& rhs ) const - { - return ( minImageCount == rhs.minImageCount ) - && ( maxImageCount == rhs.maxImageCount ) - && ( currentExtent == rhs.currentExtent ) - && ( minImageExtent == rhs.minImageExtent ) - && ( maxImageExtent == rhs.maxImageExtent ) - && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) - && ( supportedTransforms == rhs.supportedTransforms ) - && ( currentTransform == rhs.currentTransform ) - && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) - && ( supportedUsageFlags == rhs.supportedUsageFlags ); - } - - bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - uint32_t minImageCount; - uint32_t maxImageCount; - vk::Extent2D currentExtent; - vk::Extent2D minImageExtent; - vk::Extent2D maxImageExtent; - uint32_t maxImageArrayLayers; - vk::SurfaceTransformFlagsKHR supportedTransforms; - vk::SurfaceTransformFlagBitsKHR currentTransform; - vk::CompositeAlphaFlagsKHR supportedCompositeAlpha; - vk::ImageUsageFlags supportedUsageFlags; - }; - static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SurfaceCapabilities2KHR - { - protected: - SurfaceCapabilities2KHR( vk::SurfaceCapabilitiesKHR surfaceCapabilities_ = vk::SurfaceCapabilitiesKHR() ) - : surfaceCapabilities( surfaceCapabilities_ ) - {} - - SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceCapabilities2KHR; - void* pNext = nullptr; - vk::SurfaceCapabilitiesKHR surfaceCapabilities; - }; - static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "layout struct and wrapper have different size!" ); - } - - struct SurfaceCapabilities2KHR : public layout::SurfaceCapabilities2KHR - { - operator VkSurfaceCapabilities2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilities2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilities2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceCapabilities == rhs.surfaceCapabilities ); - } - - bool operator!=( SurfaceCapabilities2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceCapabilities2KHR::sType; - }; - static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct SurfaceCapabilitiesFullScreenExclusiveEXT - { - protected: - SurfaceCapabilitiesFullScreenExclusiveEXT( vk::Bool32 fullScreenExclusiveSupported_ = 0 ) - : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) - {} - - SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; - void* pNext = nullptr; - vk::Bool32 fullScreenExclusiveSupported; - }; - static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "layout struct and wrapper have different size!" ); - } - - struct SurfaceCapabilitiesFullScreenExclusiveEXT : public layout::SurfaceCapabilitiesFullScreenExclusiveEXT - { - SurfaceCapabilitiesFullScreenExclusiveEXT( vk::Bool32 fullScreenExclusiveSupported_ = 0 ) - : layout::SurfaceCapabilitiesFullScreenExclusiveEXT( fullScreenExclusiveSupported_ ) - {} - - SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) - : layout::SurfaceCapabilitiesFullScreenExclusiveEXT( rhs ) - {} - - SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( vk::Bool32 fullScreenExclusiveSupported_ ) - { - fullScreenExclusiveSupported = fullScreenExclusiveSupported_; - return *this; - } - - operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); - } - - bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceCapabilitiesFullScreenExclusiveEXT::sType; - }; - static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct SurfaceFormatKHR - { - operator VkSurfaceFormatKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceFormatKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceFormatKHR const& rhs ) const - { - return ( format == rhs.format ) - && ( colorSpace == rhs.colorSpace ); - } - - bool operator!=( SurfaceFormatKHR const& rhs ) const - { - return !operator==( rhs ); - } - - public: - vk::Format format; - vk::ColorSpaceKHR colorSpace; - }; - static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SurfaceFormat2KHR - { - protected: - SurfaceFormat2KHR( vk::SurfaceFormatKHR surfaceFormat_ = vk::SurfaceFormatKHR() ) - : surfaceFormat( surfaceFormat_ ) - {} - - SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceFormat2KHR; - void* pNext = nullptr; - vk::SurfaceFormatKHR surfaceFormat; - }; - static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "layout struct and wrapper have different size!" ); - } - - struct SurfaceFormat2KHR : public layout::SurfaceFormat2KHR - { - operator VkSurfaceFormat2KHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceFormat2KHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceFormat2KHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceFormat == rhs.surfaceFormat ); - } - - bool operator!=( SurfaceFormat2KHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceFormat2KHR::sType; - }; - static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct SurfaceFullScreenExclusiveInfoEXT - { - protected: - SurfaceFullScreenExclusiveInfoEXT( vk::FullScreenExclusiveEXT fullScreenExclusive_ = vk::FullScreenExclusiveEXT::eDefault ) - : fullScreenExclusive( fullScreenExclusive_ ) - {} - - SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; - void* pNext = nullptr; - vk::FullScreenExclusiveEXT fullScreenExclusive; - }; - static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct SurfaceFullScreenExclusiveInfoEXT : public layout::SurfaceFullScreenExclusiveInfoEXT - { - SurfaceFullScreenExclusiveInfoEXT( vk::FullScreenExclusiveEXT fullScreenExclusive_ = vk::FullScreenExclusiveEXT::eDefault ) - : layout::SurfaceFullScreenExclusiveInfoEXT( fullScreenExclusive_ ) - {} - - SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) - : layout::SurfaceFullScreenExclusiveInfoEXT( rhs ) - {} - - SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( vk::FullScreenExclusiveEXT fullScreenExclusive_ ) - { - fullScreenExclusive = fullScreenExclusive_; - return *this; - } - - operator VkSurfaceFullScreenExclusiveInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceFullScreenExclusiveInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( fullScreenExclusive == rhs.fullScreenExclusive ); - } - - bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceFullScreenExclusiveInfoEXT::sType; - }; - static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct SurfaceFullScreenExclusiveWin32InfoEXT - { - protected: - SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = 0 ) - : hmonitor( hmonitor_ ) - {} - - SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; - const void* pNext = nullptr; - HMONITOR hmonitor; - }; - static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct SurfaceFullScreenExclusiveWin32InfoEXT : public layout::SurfaceFullScreenExclusiveWin32InfoEXT - { - SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = 0 ) - : layout::SurfaceFullScreenExclusiveWin32InfoEXT( hmonitor_ ) - {} - - SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) - : layout::SurfaceFullScreenExclusiveWin32InfoEXT( rhs ) - {} - - SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) - { - hmonitor = hmonitor_; - return *this; - } - - operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( hmonitor == rhs.hmonitor ); - } - - bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceFullScreenExclusiveWin32InfoEXT::sType; - }; - static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct SurfaceProtectedCapabilitiesKHR - { - protected: - SurfaceProtectedCapabilitiesKHR( vk::Bool32 supportsProtected_ = 0 ) - : supportsProtected( supportsProtected_ ) - {} - - SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; - const void* pNext = nullptr; - vk::Bool32 supportsProtected; - }; - static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "layout struct and wrapper have different size!" 
); - } - - struct SurfaceProtectedCapabilitiesKHR : public layout::SurfaceProtectedCapabilitiesKHR - { - SurfaceProtectedCapabilitiesKHR( vk::Bool32 supportsProtected_ = 0 ) - : layout::SurfaceProtectedCapabilitiesKHR( supportsProtected_ ) - {} - - SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) - : layout::SurfaceProtectedCapabilitiesKHR( rhs ) - {} - - SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SurfaceProtectedCapabilitiesKHR & setSupportsProtected( vk::Bool32 supportsProtected_ ) - { - supportsProtected = supportsProtected_; - return *this; - } - - operator VkSurfaceProtectedCapabilitiesKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSurfaceProtectedCapabilitiesKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportsProtected == rhs.supportsProtected ); - } - - bool operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SurfaceProtectedCapabilitiesKHR::sType; - }; - static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SwapchainCounterCreateInfoEXT - { - protected: - SwapchainCounterCreateInfoEXT( vk::SurfaceCounterFlagsEXT surfaceCounters_ = vk::SurfaceCounterFlagsEXT() ) - : surfaceCounters( surfaceCounters_ ) - {} - - SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; - const void* pNext = nullptr; - vk::SurfaceCounterFlagsEXT surfaceCounters; - }; - static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "layout struct and wrapper have different size!" 
); - } - - struct SwapchainCounterCreateInfoEXT : public layout::SwapchainCounterCreateInfoEXT - { - SwapchainCounterCreateInfoEXT( vk::SurfaceCounterFlagsEXT surfaceCounters_ = vk::SurfaceCounterFlagsEXT() ) - : layout::SwapchainCounterCreateInfoEXT( surfaceCounters_ ) - {} - - SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) - : layout::SwapchainCounterCreateInfoEXT( rhs ) - {} - - SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SwapchainCounterCreateInfoEXT & setSurfaceCounters( vk::SurfaceCounterFlagsEXT surfaceCounters_ ) - { - surfaceCounters = surfaceCounters_; - return *this; - } - - operator VkSwapchainCounterCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSwapchainCounterCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( surfaceCounters == rhs.surfaceCounters ); - } - - bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SwapchainCounterCreateInfoEXT::sType; - }; - static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SwapchainCreateInfoKHR - { - protected: - SwapchainCreateInfoKHR( vk::SwapchainCreateFlagsKHR flags_ = vk::SwapchainCreateFlagsKHR(), - vk::SurfaceKHR surface_ = vk::SurfaceKHR(), - uint32_t minImageCount_ = 0, - vk::Format imageFormat_ = vk::Format::eUndefined, - vk::ColorSpaceKHR imageColorSpace_ = vk::ColorSpaceKHR::eSrgbNonlinear, - vk::Extent2D imageExtent_ = vk::Extent2D(), - uint32_t imageArrayLayers_ = 0, - vk::ImageUsageFlags imageUsage_ = vk::ImageUsageFlags(), - vk::SharingMode imageSharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr, - vk::SurfaceTransformFlagBitsKHR preTransform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity, - vk::CompositeAlphaFlagBitsKHR compositeAlpha_ = vk::CompositeAlphaFlagBitsKHR::eOpaque, - vk::PresentModeKHR presentMode_ = vk::PresentModeKHR::eImmediate, - vk::Bool32 clipped_ = 0, - vk::SwapchainKHR oldSwapchain_ = vk::SwapchainKHR() ) - : flags( flags_ ) - , surface( surface_ ) - , minImageCount( minImageCount_ ) - , imageFormat( imageFormat_ ) - , imageColorSpace( imageColorSpace_ ) - , imageExtent( imageExtent_ ) - , imageArrayLayers( imageArrayLayers_ ) - , imageUsage( imageUsage_ ) - , imageSharingMode( imageSharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , preTransform( preTransform_ ) - , compositeAlpha( compositeAlpha_ ) - , presentMode( presentMode_ ) - , clipped( clipped_ ) - , oldSwapchain( oldSwapchain_ ) - {} - - SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSwapchainCreateInfoKHR; - const void* pNext = 
nullptr; - vk::SwapchainCreateFlagsKHR flags; - vk::SurfaceKHR surface; - uint32_t minImageCount; - vk::Format imageFormat; - vk::ColorSpaceKHR imageColorSpace; - vk::Extent2D imageExtent; - uint32_t imageArrayLayers; - vk::ImageUsageFlags imageUsage; - vk::SharingMode imageSharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - vk::SurfaceTransformFlagBitsKHR preTransform; - vk::CompositeAlphaFlagBitsKHR compositeAlpha; - vk::PresentModeKHR presentMode; - vk::Bool32 clipped; - vk::SwapchainKHR oldSwapchain; - }; - static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct SwapchainCreateInfoKHR : public layout::SwapchainCreateInfoKHR - { - SwapchainCreateInfoKHR( vk::SwapchainCreateFlagsKHR flags_ = vk::SwapchainCreateFlagsKHR(), - vk::SurfaceKHR surface_ = vk::SurfaceKHR(), - uint32_t minImageCount_ = 0, - vk::Format imageFormat_ = vk::Format::eUndefined, - vk::ColorSpaceKHR imageColorSpace_ = vk::ColorSpaceKHR::eSrgbNonlinear, - vk::Extent2D imageExtent_ = vk::Extent2D(), - uint32_t imageArrayLayers_ = 0, - vk::ImageUsageFlags imageUsage_ = vk::ImageUsageFlags(), - vk::SharingMode imageSharingMode_ = vk::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = 0, - const uint32_t* pQueueFamilyIndices_ = nullptr, - vk::SurfaceTransformFlagBitsKHR preTransform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity, - vk::CompositeAlphaFlagBitsKHR compositeAlpha_ = vk::CompositeAlphaFlagBitsKHR::eOpaque, - vk::PresentModeKHR presentMode_ = vk::PresentModeKHR::eImmediate, - vk::Bool32 clipped_ = 0, - vk::SwapchainKHR oldSwapchain_ = vk::SwapchainKHR() ) - : layout::SwapchainCreateInfoKHR( flags_, surface_, minImageCount_, imageFormat_, imageColorSpace_, imageExtent_, imageArrayLayers_, imageUsage_, imageSharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_, preTransform_, compositeAlpha_, presentMode_, clipped_, oldSwapchain_ ) - {} - - SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) - : layout::SwapchainCreateInfoKHR( rhs ) - {} - - SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SwapchainCreateInfoKHR & setFlags( vk::SwapchainCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - SwapchainCreateInfoKHR & setSurface( vk::SurfaceKHR surface_ ) - { - surface = surface_; - return *this; - } - - SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) - { - minImageCount = minImageCount_; - return *this; - } - - SwapchainCreateInfoKHR & setImageFormat( vk::Format imageFormat_ ) - { - imageFormat = imageFormat_; - return *this; - } - - SwapchainCreateInfoKHR & setImageColorSpace( vk::ColorSpaceKHR imageColorSpace_ ) - { - imageColorSpace = imageColorSpace_; - return *this; - } - - SwapchainCreateInfoKHR & setImageExtent( vk::Extent2D imageExtent_ ) - { - imageExtent = imageExtent_; - return *this; - } - - SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) - { - imageArrayLayers = imageArrayLayers_; - return *this; - } - - SwapchainCreateInfoKHR & setImageUsage( vk::ImageUsageFlags imageUsage_ ) - { - imageUsage = imageUsage_; - return *this; - } - - SwapchainCreateInfoKHR & setImageSharingMode( vk::SharingMode imageSharingMode_ ) - { - imageSharingMode = imageSharingMode_; - return *this; - } - - SwapchainCreateInfoKHR 
& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) - { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; - } - - SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) - { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; - } - - SwapchainCreateInfoKHR & setPreTransform( vk::SurfaceTransformFlagBitsKHR preTransform_ ) - { - preTransform = preTransform_; - return *this; - } - - SwapchainCreateInfoKHR & setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR compositeAlpha_ ) - { - compositeAlpha = compositeAlpha_; - return *this; - } - - SwapchainCreateInfoKHR & setPresentMode( vk::PresentModeKHR presentMode_ ) - { - presentMode = presentMode_; - return *this; - } - - SwapchainCreateInfoKHR & setClipped( vk::Bool32 clipped_ ) - { - clipped = clipped_; - return *this; - } - - SwapchainCreateInfoKHR & setOldSwapchain( vk::SwapchainKHR oldSwapchain_ ) - { - oldSwapchain = oldSwapchain_; - return *this; - } - - operator VkSwapchainCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSwapchainCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SwapchainCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( surface == rhs.surface ) - && ( minImageCount == rhs.minImageCount ) - && ( imageFormat == rhs.imageFormat ) - && ( imageColorSpace == rhs.imageColorSpace ) - && ( imageExtent == rhs.imageExtent ) - && ( imageArrayLayers == rhs.imageArrayLayers ) - && ( imageUsage == rhs.imageUsage ) - && ( imageSharingMode == rhs.imageSharingMode ) - && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) - && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) - && ( preTransform == rhs.preTransform ) - && ( compositeAlpha == rhs.compositeAlpha ) - && ( presentMode == rhs.presentMode ) - && ( clipped == rhs.clipped ) - && ( oldSwapchain == rhs.oldSwapchain ); - } - - bool operator!=( SwapchainCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SwapchainCreateInfoKHR::sType; - }; - static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct SwapchainDisplayNativeHdrCreateInfoAMD - { - protected: - SwapchainDisplayNativeHdrCreateInfoAMD( vk::Bool32 localDimmingEnable_ = 0 ) - : localDimmingEnable( localDimmingEnable_ ) - {} - - SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; - const void* pNext = nullptr; - vk::Bool32 localDimmingEnable; - }; - static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "layout struct and wrapper have different size!" 
); - } - - struct SwapchainDisplayNativeHdrCreateInfoAMD : public layout::SwapchainDisplayNativeHdrCreateInfoAMD - { - SwapchainDisplayNativeHdrCreateInfoAMD( vk::Bool32 localDimmingEnable_ = 0 ) - : layout::SwapchainDisplayNativeHdrCreateInfoAMD( localDimmingEnable_ ) - {} - - SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) - : layout::SwapchainDisplayNativeHdrCreateInfoAMD( rhs ) - {} - - SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( vk::Bool32 localDimmingEnable_ ) - { - localDimmingEnable = localDimmingEnable_; - return *this; - } - - operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( localDimmingEnable == rhs.localDimmingEnable ); - } - - bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::SwapchainDisplayNativeHdrCreateInfoAMD::sType; - }; - static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct TextureLODGatherFormatPropertiesAMD - { - protected: - TextureLODGatherFormatPropertiesAMD( vk::Bool32 supportsTextureGatherLODBiasAMD_ = 0 ) - : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) - {} - - TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; - void* pNext = nullptr; - vk::Bool32 supportsTextureGatherLODBiasAMD; - }; - static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "layout struct and wrapper have different size!" ); - } - - struct TextureLODGatherFormatPropertiesAMD : public layout::TextureLODGatherFormatPropertiesAMD - { - operator VkTextureLODGatherFormatPropertiesAMD const&() const - { - return *reinterpret_cast( this ); - } - - operator VkTextureLODGatherFormatPropertiesAMD &() - { - return *reinterpret_cast( this ); - } - - bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); - } - - bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::TextureLODGatherFormatPropertiesAMD::sType; - }; - static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" 
); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ValidationCacheCreateInfoEXT - { - protected: - ValidationCacheCreateInfoEXT( vk::ValidationCacheCreateFlagsEXT flags_ = vk::ValidationCacheCreateFlagsEXT(), - size_t initialDataSize_ = 0, - const void* pInitialData_ = nullptr ) - : flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) - {} - - ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; - const void* pNext = nullptr; - vk::ValidationCacheCreateFlagsEXT flags; - size_t initialDataSize; - const void* pInitialData; - }; - static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "layout struct and wrapper have different size!" ); - } - - struct ValidationCacheCreateInfoEXT : public layout::ValidationCacheCreateInfoEXT - { - ValidationCacheCreateInfoEXT( vk::ValidationCacheCreateFlagsEXT flags_ = vk::ValidationCacheCreateFlagsEXT(), - size_t initialDataSize_ = 0, - const void* pInitialData_ = nullptr ) - : layout::ValidationCacheCreateInfoEXT( flags_, initialDataSize_, pInitialData_ ) - {} - - ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) - : layout::ValidationCacheCreateInfoEXT( rhs ) - {} - - ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ValidationCacheCreateInfoEXT & setFlags( vk::ValidationCacheCreateFlagsEXT flags_ ) - { - flags = flags_; - return *this; - } - - ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) - { - initialDataSize = initialDataSize_; - return *this; - } - - ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) - { - pInitialData = pInitialData_; - return *this; - } - - operator VkValidationCacheCreateInfoEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkValidationCacheCreateInfoEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( initialDataSize == rhs.initialDataSize ) - && ( pInitialData == rhs.pInitialData ); - } - - bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ValidationCacheCreateInfoEXT::sType; - }; - static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct ValidationFeaturesEXT - { - protected: - ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = 0, - const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = nullptr, - uint32_t disabledValidationFeatureCount_ = 0, - const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = nullptr ) - : enabledValidationFeatureCount( enabledValidationFeatureCount_ ) - , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) - , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) - , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) - {} - - ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eValidationFeaturesEXT; - const void* pNext = nullptr; - uint32_t enabledValidationFeatureCount; - const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures; - uint32_t disabledValidationFeatureCount; - const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures; - }; - static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "layout struct and wrapper have different size!" ); - } - - struct ValidationFeaturesEXT : public layout::ValidationFeaturesEXT - { - ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = 0, - const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = nullptr, - uint32_t disabledValidationFeatureCount_ = 0, - const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = nullptr ) - : layout::ValidationFeaturesEXT( enabledValidationFeatureCount_, pEnabledValidationFeatures_, disabledValidationFeatureCount_, pDisabledValidationFeatures_ ) - {} - - ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) - : layout::ValidationFeaturesEXT( rhs ) - {} - - ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ValidationFeaturesEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) - { - enabledValidationFeatureCount = enabledValidationFeatureCount_; - return *this; - } - - ValidationFeaturesEXT & setPEnabledValidationFeatures( const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) - { - pEnabledValidationFeatures = pEnabledValidationFeatures_; - return *this; - } - - ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) - { - disabledValidationFeatureCount = disabledValidationFeatureCount_; - return *this; - } - - ValidationFeaturesEXT & setPDisabledValidationFeatures( const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) - { - pDisabledValidationFeatures = pDisabledValidationFeatures_; - return *this; - } - - operator VkValidationFeaturesEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkValidationFeaturesEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ValidationFeaturesEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) - && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) - && ( disabledValidationFeatureCount == 
rhs.disabledValidationFeatureCount ) - && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); - } - - bool operator!=( ValidationFeaturesEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ValidationFeaturesEXT::sType; - }; - static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); - - namespace layout - { - struct ValidationFlagsEXT - { - protected: - ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, - const vk::ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) - : disabledValidationCheckCount( disabledValidationCheckCount_ ) - , pDisabledValidationChecks( pDisabledValidationChecks_ ) - {} - - ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eValidationFlagsEXT; - const void* pNext = nullptr; - uint32_t disabledValidationCheckCount; - const vk::ValidationCheckEXT* pDisabledValidationChecks; - }; - static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "layout struct and wrapper have different size!" ); - } - - struct ValidationFlagsEXT : public layout::ValidationFlagsEXT - { - ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, - const vk::ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) - : layout::ValidationFlagsEXT( disabledValidationCheckCount_, pDisabledValidationChecks_ ) - {} - - ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) - : layout::ValidationFlagsEXT( rhs ) - {} - - ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ValidationFlagsEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) - { - disabledValidationCheckCount = disabledValidationCheckCount_; - return *this; - } - - ValidationFlagsEXT & setPDisabledValidationChecks( const vk::ValidationCheckEXT* pDisabledValidationChecks_ ) - { - pDisabledValidationChecks = pDisabledValidationChecks_; - return *this; - } - - operator VkValidationFlagsEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkValidationFlagsEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ValidationFlagsEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) - && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); - } - - bool operator!=( ValidationFlagsEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ValidationFlagsEXT::sType; - }; - static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_VI_NN - - namespace layout - { - struct ViSurfaceCreateInfoNN - { - protected: - ViSurfaceCreateInfoNN( vk::ViSurfaceCreateFlagsNN flags_ = vk::ViSurfaceCreateFlagsNN(), - void* window_ = nullptr ) - : flags( flags_ ) - , window( window_ ) - {} - - ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eViSurfaceCreateInfoNN; - const void* pNext = nullptr; - vk::ViSurfaceCreateFlagsNN flags; - void* window; - }; - static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "layout struct and wrapper have different size!" ); - } - - struct ViSurfaceCreateInfoNN : public layout::ViSurfaceCreateInfoNN - { - ViSurfaceCreateInfoNN( vk::ViSurfaceCreateFlagsNN flags_ = vk::ViSurfaceCreateFlagsNN(), - void* window_ = nullptr ) - : layout::ViSurfaceCreateInfoNN( flags_, window_ ) - {} - - ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) - : layout::ViSurfaceCreateInfoNN( rhs ) - {} - - ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - ViSurfaceCreateInfoNN & setFlags( vk::ViSurfaceCreateFlagsNN flags_ ) - { - flags = flags_; - return *this; - } - - ViSurfaceCreateInfoNN & setWindow( void* window_ ) - { - window = window_; - return *this; - } - - operator VkViSurfaceCreateInfoNN const&() const - { - return *reinterpret_cast( this ); - } - - operator VkViSurfaceCreateInfoNN &() - { - return *reinterpret_cast( this ); - } - - bool operator==( ViSurfaceCreateInfoNN const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( window == rhs.window ); - } - - bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::ViSurfaceCreateInfoNN::sType; - }; - static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_VI_NN*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - - namespace layout - { - struct WaylandSurfaceCreateInfoKHR - { - protected: - WaylandSurfaceCreateInfoKHR( vk::WaylandSurfaceCreateFlagsKHR flags_ = vk::WaylandSurfaceCreateFlagsKHR(), - struct wl_display* display_ = nullptr, - struct wl_surface* surface_ = nullptr ) - : flags( flags_ ) - , display( display_ ) - , surface( surface_ ) - {} - - WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; - const void* pNext = nullptr; - vk::WaylandSurfaceCreateFlagsKHR flags; - struct wl_display* display; - struct wl_surface* surface; - }; - static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct WaylandSurfaceCreateInfoKHR : public layout::WaylandSurfaceCreateInfoKHR - { - WaylandSurfaceCreateInfoKHR( vk::WaylandSurfaceCreateFlagsKHR flags_ = vk::WaylandSurfaceCreateFlagsKHR(), - struct wl_display* display_ = nullptr, - struct wl_surface* surface_ = nullptr ) - : layout::WaylandSurfaceCreateInfoKHR( flags_, display_, surface_ ) - {} - - WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) - : layout::WaylandSurfaceCreateInfoKHR( rhs ) - {} - - WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - WaylandSurfaceCreateInfoKHR & setFlags( vk::WaylandSurfaceCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) - { - display = display_; - return *this; - } - - WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) - { - surface = surface_; - return *this; - } - - operator VkWaylandSurfaceCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWaylandSurfaceCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( display == rhs.display ) - && ( surface == rhs.surface ); - } - - bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::WaylandSurfaceCreateInfoKHR::sType; - }; - static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct Win32KeyedMutexAcquireReleaseInfoKHR - { - protected: - Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0, - const vk::DeviceMemory* pAcquireSyncs_ = nullptr, - const uint64_t* pAcquireKeys_ = nullptr, - const uint32_t* pAcquireTimeouts_ = nullptr, - uint32_t releaseCount_ = 0, - const vk::DeviceMemory* pReleaseSyncs_ = nullptr, - const uint64_t* pReleaseKeys_ = nullptr ) - : acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeouts( pAcquireTimeouts_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) - {} - - Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; - const void* pNext = nullptr; - uint32_t acquireCount; - const vk::DeviceMemory* pAcquireSyncs; - const uint64_t* pAcquireKeys; - const uint32_t* pAcquireTimeouts; - uint32_t releaseCount; - const vk::DeviceMemory* pReleaseSyncs; - const uint64_t* pReleaseKeys; - }; - static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct Win32KeyedMutexAcquireReleaseInfoKHR : public layout::Win32KeyedMutexAcquireReleaseInfoKHR - { - Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0, - const vk::DeviceMemory* pAcquireSyncs_ = nullptr, - const uint64_t* pAcquireKeys_ = nullptr, - const uint32_t* pAcquireTimeouts_ = nullptr, - uint32_t releaseCount_ = 0, - const vk::DeviceMemory* pReleaseSyncs_ = nullptr, - const uint64_t* pReleaseKeys_ = nullptr ) - : layout::Win32KeyedMutexAcquireReleaseInfoKHR( acquireCount_, pAcquireSyncs_, pAcquireKeys_, pAcquireTimeouts_, releaseCount_, pReleaseSyncs_, pReleaseKeys_ ) - {} - - Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) - : layout::Win32KeyedMutexAcquireReleaseInfoKHR( rhs ) - {} - - Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) - { - acquireCount = acquireCount_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const vk::DeviceMemory* pAcquireSyncs_ ) - { - pAcquireSyncs = pAcquireSyncs_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) - { - pAcquireKeys = pAcquireKeys_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) - { - pAcquireTimeouts = pAcquireTimeouts_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) - { - releaseCount = releaseCount_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const vk::DeviceMemory* pReleaseSyncs_ ) - { - pReleaseSyncs = pReleaseSyncs_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) - { - pReleaseKeys = pReleaseKeys_; - return *this; - } - - operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( acquireCount == rhs.acquireCount ) - && ( pAcquireSyncs == rhs.pAcquireSyncs ) - && ( pAcquireKeys == rhs.pAcquireKeys ) - && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) - && ( releaseCount == rhs.releaseCount ) - && ( pReleaseSyncs == rhs.pReleaseSyncs ) - && ( pReleaseKeys == rhs.pReleaseKeys ); - } - - bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::Win32KeyedMutexAcquireReleaseInfoKHR::sType; - }; - static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct Win32KeyedMutexAcquireReleaseInfoNV - { - protected: - Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, - const vk::DeviceMemory* pAcquireSyncs_ = nullptr, - const uint64_t* pAcquireKeys_ = nullptr, - const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, - uint32_t releaseCount_ = 0, - const vk::DeviceMemory* pReleaseSyncs_ = nullptr, - const uint64_t* pReleaseKeys_ = nullptr ) - : acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) - {} - - Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; - const void* pNext = nullptr; - uint32_t acquireCount; - const vk::DeviceMemory* pAcquireSyncs; - const uint64_t* pAcquireKeys; - const uint32_t* pAcquireTimeoutMilliseconds; - uint32_t releaseCount; - const vk::DeviceMemory* pReleaseSyncs; - const uint64_t* pReleaseKeys; - }; - static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "layout struct and wrapper have different size!" ); - } - - struct Win32KeyedMutexAcquireReleaseInfoNV : public layout::Win32KeyedMutexAcquireReleaseInfoNV - { - Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, - const vk::DeviceMemory* pAcquireSyncs_ = nullptr, - const uint64_t* pAcquireKeys_ = nullptr, - const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, - uint32_t releaseCount_ = 0, - const vk::DeviceMemory* pReleaseSyncs_ = nullptr, - const uint64_t* pReleaseKeys_ = nullptr ) - : layout::Win32KeyedMutexAcquireReleaseInfoNV( acquireCount_, pAcquireSyncs_, pAcquireKeys_, pAcquireTimeoutMilliseconds_, releaseCount_, pReleaseSyncs_, pReleaseKeys_ ) - {} - - Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) - : layout::Win32KeyedMutexAcquireReleaseInfoNV( rhs ) - {} - - Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) - { - acquireCount = acquireCount_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const vk::DeviceMemory* pAcquireSyncs_ ) - { - pAcquireSyncs = pAcquireSyncs_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) - { - pAcquireKeys = pAcquireKeys_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) - { - pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) - { - releaseCount = releaseCount_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const 
vk::DeviceMemory* pReleaseSyncs_ ) - { - pReleaseSyncs = pReleaseSyncs_; - return *this; - } - - Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) - { - pReleaseKeys = pReleaseKeys_; - return *this; - } - - operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWin32KeyedMutexAcquireReleaseInfoNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( acquireCount == rhs.acquireCount ) - && ( pAcquireSyncs == rhs.pAcquireSyncs ) - && ( pAcquireKeys == rhs.pAcquireKeys ) - && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) - && ( releaseCount == rhs.releaseCount ) - && ( pReleaseSyncs == rhs.pReleaseSyncs ) - && ( pReleaseKeys == rhs.pReleaseKeys ); - } - - bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::Win32KeyedMutexAcquireReleaseInfoNV::sType; - }; - static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - - namespace layout - { - struct Win32SurfaceCreateInfoKHR - { - protected: - Win32SurfaceCreateInfoKHR( vk::Win32SurfaceCreateFlagsKHR flags_ = vk::Win32SurfaceCreateFlagsKHR(), - HINSTANCE hinstance_ = 0, - HWND hwnd_ = 0 ) - : flags( flags_ ) - , hinstance( hinstance_ ) - , hwnd( hwnd_ ) - {} - - Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; - const void* pNext = nullptr; - vk::Win32SurfaceCreateFlagsKHR flags; - HINSTANCE hinstance; - HWND hwnd; - }; - static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" 
); - } - - struct Win32SurfaceCreateInfoKHR : public layout::Win32SurfaceCreateInfoKHR - { - Win32SurfaceCreateInfoKHR( vk::Win32SurfaceCreateFlagsKHR flags_ = vk::Win32SurfaceCreateFlagsKHR(), - HINSTANCE hinstance_ = 0, - HWND hwnd_ = 0 ) - : layout::Win32SurfaceCreateInfoKHR( flags_, hinstance_, hwnd_ ) - {} - - Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) - : layout::Win32SurfaceCreateInfoKHR( rhs ) - {} - - Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - Win32SurfaceCreateInfoKHR & setFlags( vk::Win32SurfaceCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) - { - hinstance = hinstance_; - return *this; - } - - Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) - { - hwnd = hwnd_; - return *this; - } - - operator VkWin32SurfaceCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWin32SurfaceCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( hinstance == rhs.hinstance ) - && ( hwnd == rhs.hwnd ); - } - - bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::Win32SurfaceCreateInfoKHR::sType; - }; - static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - namespace layout - { - struct WriteDescriptorSet - { - protected: - WriteDescriptorSet( vk::DescriptorSet dstSet_ = vk::DescriptorSet(), - uint32_t dstBinding_ = 0, - uint32_t dstArrayElement_ = 0, - uint32_t descriptorCount_ = 0, - vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler, - const vk::DescriptorImageInfo* pImageInfo_ = nullptr, - const vk::DescriptorBufferInfo* pBufferInfo_ = nullptr, - const vk::BufferView* pTexelBufferView_ = nullptr ) - : dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , pImageInfo( pImageInfo_ ) - , pBufferInfo( pBufferInfo_ ) - , pTexelBufferView( pTexelBufferView_ ) - {} - - WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWriteDescriptorSet; - const void* pNext = nullptr; - vk::DescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - vk::DescriptorType descriptorType; - const vk::DescriptorImageInfo* pImageInfo; - const vk::DescriptorBufferInfo* pBufferInfo; - const vk::BufferView* pTexelBufferView; - }; - static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "layout struct and wrapper have different size!" 
); - } - - struct WriteDescriptorSet : public layout::WriteDescriptorSet - { - WriteDescriptorSet( vk::DescriptorSet dstSet_ = vk::DescriptorSet(), - uint32_t dstBinding_ = 0, - uint32_t dstArrayElement_ = 0, - uint32_t descriptorCount_ = 0, - vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler, - const vk::DescriptorImageInfo* pImageInfo_ = nullptr, - const vk::DescriptorBufferInfo* pBufferInfo_ = nullptr, - const vk::BufferView* pTexelBufferView_ = nullptr ) - : layout::WriteDescriptorSet( dstSet_, dstBinding_, dstArrayElement_, descriptorCount_, descriptorType_, pImageInfo_, pBufferInfo_, pTexelBufferView_ ) - {} - - WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) - : layout::WriteDescriptorSet( rhs ) - {} - - WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - WriteDescriptorSet & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - WriteDescriptorSet & setDstSet( vk::DescriptorSet dstSet_ ) - { - dstSet = dstSet_; - return *this; - } - - WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) - { - dstBinding = dstBinding_; - return *this; - } - - WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) - { - dstArrayElement = dstArrayElement_; - return *this; - } - - WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) - { - descriptorCount = descriptorCount_; - return *this; - } - - WriteDescriptorSet & setDescriptorType( vk::DescriptorType descriptorType_ ) - { - descriptorType = descriptorType_; - return *this; - } - - WriteDescriptorSet & setPImageInfo( const vk::DescriptorImageInfo* pImageInfo_ ) - { - pImageInfo = pImageInfo_; - return *this; - } - - WriteDescriptorSet & setPBufferInfo( const vk::DescriptorBufferInfo* pBufferInfo_ ) - { - pBufferInfo = pBufferInfo_; - return *this; - } - - WriteDescriptorSet & setPTexelBufferView( const vk::BufferView* pTexelBufferView_ ) - { - pTexelBufferView = pTexelBufferView_; - return *this; - } - - operator VkWriteDescriptorSet const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWriteDescriptorSet &() - { - return *reinterpret_cast( this ); - } - - bool operator==( WriteDescriptorSet const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dstSet == rhs.dstSet ) - && ( dstBinding == rhs.dstBinding ) - && ( dstArrayElement == rhs.dstArrayElement ) - && ( descriptorCount == rhs.descriptorCount ) - && ( descriptorType == rhs.descriptorType ) - && ( pImageInfo == rhs.pImageInfo ) - && ( pBufferInfo == rhs.pBufferInfo ) - && ( pTexelBufferView == rhs.pTexelBufferView ); - } - - bool operator!=( WriteDescriptorSet const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::WriteDescriptorSet::sType; - }; - static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct WriteDescriptorSetAccelerationStructureNV - { - protected: - WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0, - const vk::AccelerationStructureNV* pAccelerationStructures_ = nullptr ) - : accelerationStructureCount( accelerationStructureCount_ ) - , pAccelerationStructures( pAccelerationStructures_ ) - {} - - WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; - const void* pNext = nullptr; - uint32_t accelerationStructureCount; - const vk::AccelerationStructureNV* pAccelerationStructures; - }; - static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "layout struct and wrapper have different size!" ); - } - - struct WriteDescriptorSetAccelerationStructureNV : public layout::WriteDescriptorSetAccelerationStructureNV - { - WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0, - const vk::AccelerationStructureNV* pAccelerationStructures_ = nullptr ) - : layout::WriteDescriptorSetAccelerationStructureNV( accelerationStructureCount_, pAccelerationStructures_ ) - {} - - WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) - : layout::WriteDescriptorSetAccelerationStructureNV( rhs ) - {} - - WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) - { - accelerationStructureCount = accelerationStructureCount_; - return *this; - } - - WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const vk::AccelerationStructureNV* pAccelerationStructures_ ) - { - pAccelerationStructures = pAccelerationStructures_; - return *this; - } - - operator VkWriteDescriptorSetAccelerationStructureNV const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWriteDescriptorSetAccelerationStructureNV &() - { - return *reinterpret_cast( this ); - } - - bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( accelerationStructureCount == rhs.accelerationStructureCount ) - && ( pAccelerationStructures == rhs.pAccelerationStructures ); - } - - bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::WriteDescriptorSetAccelerationStructureNV::sType; - }; - static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - - namespace layout - { - struct WriteDescriptorSetInlineUniformBlockEXT - { - protected: - WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0, - const void* pData_ = nullptr ) - : dataSize( dataSize_ ) - , pData( pData_ ) - {} - - WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT; - const void* pNext = nullptr; - uint32_t dataSize; - const void* pData; - }; - static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "layout struct and wrapper have different size!" ); - } - - struct WriteDescriptorSetInlineUniformBlockEXT : public layout::WriteDescriptorSetInlineUniformBlockEXT - { - WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0, - const void* pData_ = nullptr ) - : layout::WriteDescriptorSetInlineUniformBlockEXT( dataSize_, pData_ ) - {} - - WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) - : layout::WriteDescriptorSetInlineUniformBlockEXT( rhs ) - {} - - WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) - { - dataSize = dataSize_; - return *this; - } - - WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) - { - pData = pData_; - return *this; - } - - operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const - { - return *reinterpret_cast( this ); - } - - operator VkWriteDescriptorSetInlineUniformBlockEXT &() - { - return *reinterpret_cast( this ); - } - - bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( dataSize == rhs.dataSize ) - && ( pData == rhs.pData ); - } - - bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::WriteDescriptorSetInlineUniformBlockEXT::sType; - }; - static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); - -#ifdef VK_USE_PLATFORM_XCB_KHR - - namespace layout - { - struct XcbSurfaceCreateInfoKHR - { - protected: - XcbSurfaceCreateInfoKHR( vk::XcbSurfaceCreateFlagsKHR flags_ = vk::XcbSurfaceCreateFlagsKHR(), - xcb_connection_t* connection_ = nullptr, - xcb_window_t window_ = 0 ) - : flags( flags_ ) - , connection( connection_ ) - , window( window_ ) - {} - - XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; - const void* pNext = nullptr; - vk::XcbSurfaceCreateFlagsKHR flags; - xcb_connection_t* connection; - xcb_window_t window; - }; - static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct XcbSurfaceCreateInfoKHR : public layout::XcbSurfaceCreateInfoKHR - { - XcbSurfaceCreateInfoKHR( vk::XcbSurfaceCreateFlagsKHR flags_ = vk::XcbSurfaceCreateFlagsKHR(), - xcb_connection_t* connection_ = nullptr, - xcb_window_t window_ = 0 ) - : layout::XcbSurfaceCreateInfoKHR( flags_, connection_, window_ ) - {} - - XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) - : layout::XcbSurfaceCreateInfoKHR( rhs ) - {} - - XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - XcbSurfaceCreateInfoKHR & setFlags( vk::XcbSurfaceCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) - { - connection = connection_; - return *this; - } - - XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) - { - window = window_; - return *this; - } - - operator VkXcbSurfaceCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkXcbSurfaceCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( connection == rhs.connection ) - && ( window == rhs.window ); - } - - bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::XcbSurfaceCreateInfoKHR::sType; - }; - static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - - namespace layout - { - struct XlibSurfaceCreateInfoKHR - { - protected: - XlibSurfaceCreateInfoKHR( vk::XlibSurfaceCreateFlagsKHR flags_ = vk::XlibSurfaceCreateFlagsKHR(), - Display* dpy_ = nullptr, - Window window_ = 0 ) - : flags( flags_ ) - , dpy( dpy_ ) - , window( window_ ) - {} - - XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - } - - XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - public: - vk::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; - const void* pNext = nullptr; - vk::XlibSurfaceCreateFlagsKHR flags; - Display* dpy; - Window window; - }; - static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" ); - } - - struct XlibSurfaceCreateInfoKHR : public layout::XlibSurfaceCreateInfoKHR - { - XlibSurfaceCreateInfoKHR( vk::XlibSurfaceCreateFlagsKHR flags_ = vk::XlibSurfaceCreateFlagsKHR(), - Display* dpy_ = nullptr, - Window window_ = 0 ) - : layout::XlibSurfaceCreateInfoKHR( flags_, dpy_, window_ ) - {} - - XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) - : layout::XlibSurfaceCreateInfoKHR( rhs ) - {} - - XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) - { - *reinterpret_cast(this) = rhs; - return *this; - } - - XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) - { - pNext = pNext_; - return *this; - } - - XlibSurfaceCreateInfoKHR & setFlags( vk::XlibSurfaceCreateFlagsKHR flags_ ) - { - flags = flags_; - return *this; - } - - XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) - { - dpy = dpy_; - return *this; - } - - XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) - { - window = window_; - return *this; - } - - operator VkXlibSurfaceCreateInfoKHR const&() const - { - return *reinterpret_cast( this ); - } - - operator VkXlibSurfaceCreateInfoKHR &() - { - return *reinterpret_cast( this ); - } - - bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const - { - return ( sType == rhs.sType ) - && ( pNext == rhs.pNext ) - && ( flags == rhs.flags ) - && ( dpy == rhs.dpy ) - && ( window == rhs.window ); - } - - bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const - { - return !operator==( rhs ); - } - - private: - using layout::XlibSurfaceCreateInfoKHR::sType; - }; - static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); - static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - template - VULKAN_HPP_INLINE Result createInstance( const vk::InstanceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Instance* pInstance, Dispatch const &d) - { - return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pInstance ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) - { - vk::Instance instance; - Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) - { - vk::Instance instance; - Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - - ObjectDestroy deleter( allocator, d ); - return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d) - { - return static_cast( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) - { - std::vector properties; - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d) - { - return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) - { - std::vector properties; - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) - { - return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type enumerateInstanceVersion(Dispatch const &d ) - { - uint32_t apiVersion; - Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); - return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result CommandBuffer::begin( const vk::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const - { - return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const - { - Result result = static_cast( 
d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const vk::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const - { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const - { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const - { - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const - { - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, Dispatch const &d) const - { - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, Dispatch const &d ) const - { - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, uint32_t index, Dispatch const &d) const - { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const - { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const vk::RenderPassBeginInfo* pRenderPassBegin, vk::SubpassContents contents, Dispatch const &d) const - { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, vk::SubpassContents contents, Dispatch const &d ) const - { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const vk::RenderPassBeginInfo* pRenderPassBegin, 
const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const - { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const - { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const - { - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); -#else - if ( counterBuffers.size() != counterBufferOffsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const - { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const - { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( vk::Buffer buffer, vk::DeviceSize offset, vk::IndexType indexType, Dispatch const &d) const - { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( vk::Buffer buffer, vk::DeviceSize offset, vk::IndexType indexType, Dispatch const &d ) const - { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( 
buffer ), static_cast( offset ), static_cast( indexType ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( vk::PipelineBindPoint pipelineBindPoint, vk::Pipeline pipeline, Dispatch const &d) const - { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( vk::PipelineBindPoint pipelineBindPoint, vk::Pipeline pipeline, Dispatch const &d ) const - { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( vk::ImageView imageView, vk::ImageLayout imageLayout, Dispatch const &d) const - { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( vk::ImageView imageView, vk::ImageLayout imageLayout, Dispatch const &d ) const - { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, const vk::DeviceSize* pSizes, Dispatch const &d) const - { - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); -#else - if ( buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); -#else - if ( offsets.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, Dispatch const &d) const - { - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), 
reinterpret_cast( pOffsets ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageBlit* pRegions, vk::Filter filter, Dispatch const &d) const - { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, vk::Filter filter, Dispatch const &d ) const - { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const vk::AccelerationStructureInfoNV* pInfo, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d) const - { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d ) const - { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const vk::ClearAttachment* pAttachments, uint32_t rectCount, const vk::ClearRect* pRects, Dispatch const &d) const - { - d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void 
CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const - { - d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearColorValue* pColor, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d) const - { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const - { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d) const - { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const - { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::CopyAccelerationStructureModeNV mode, Dispatch const &d) const - { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const - { - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferCopy* pRegions, Dispatch const &d) const - { - d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const - { - d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( 
regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d) const - { - d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const - { - d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageCopy* pRegions, Dispatch const &d) const - { - d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const - { - d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d) const - { - d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const - { - d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d) const - { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const 
&d ) const - { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const - { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const - { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const - { - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const - { - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const - { - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const - { - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const - { - d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const - { - d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const - { - d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const - { - d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const - { - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } -#else - template - 
VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const - { - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( vk::Buffer buffer, vk::DeviceSize offset, Dispatch const &d) const - { - d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( vk::Buffer buffer, vk::DeviceSize offset, Dispatch const &d ) const - { - d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const - { - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const - { - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const - { - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const - { - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void 
CommandBuffer::drawIndexedIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, vk::Buffer counterBuffer, vk::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const - { - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), static_cast( counterBufferOffset ), counterOffset, vertexStride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, vk::Buffer counterBuffer, vk::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const - { - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), static_cast( counterBufferOffset ), counterOffset, vertexStride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#else 
- template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const - { - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const - { - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const - { - d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const - { - d.vkCmdDrawMeshTasksNV( m_commandBuffer, 
taskCount, firstTask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const - { - d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const - { - d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const - { - d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const - { - d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endQuery( vk::QueryPool queryPool, uint32_t query, Dispatch const &d) const - { - d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endQuery( vk::QueryPool queryPool, uint32_t query, Dispatch const &d ) const - { - d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const - { - d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const - { - d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const - { - d.vkCmdEndRenderPass( m_commandBuffer ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const - { - d.vkCmdEndRenderPass( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const - { - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const - { - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const - { - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy 
counterBufferOffsets, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); -#else - if ( counterBuffers.size() != counterBufferOffsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d) const - { - d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy commandBuffers, Dispatch const &d ) const - { - d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize size, uint32_t data, Dispatch const &d) const - { - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize size, uint32_t data, Dispatch const &d ) const - { - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const - { - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const - { - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( vk::SubpassContents contents, Dispatch const &d) const - { - d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( vk::SubpassContents contents, Dispatch const &d ) const - { - d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const - { - d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const - { 
- d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const - { - d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const - { - d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const vk::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const - { - d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( pProcessCommandsInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const - { - d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( &processCommandsInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const - { - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d ) const - { - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast( values.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const - { - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ) ); - } -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d ) const - { - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, vk::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const - { - d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, vk::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const - { - d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const vk::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const - { - d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( pReserveSpaceInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const - { - d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( &reserveSpaceInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d) const - { - d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d ) const - { - d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const - { - d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const - { - d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageResolve* pRegions, Dispatch const &d) const - { - d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, 
reinterpret_cast( pRegions ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const - { - d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const - { - d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const - { - d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const - { - d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const - { - d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const - { - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrderCount, reinterpret_cast( pCustomSampleOrders ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d ) const - { - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrders.size() , reinterpret_cast( customSampleOrders.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const - { - d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const - { - d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const - { - d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const - { - d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); - } -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const - { - d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const - { - d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const - { - d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const - { - d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const vk::Rect2D* pDiscardRectangles, Dispatch const &d) const - { - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d ) const - { - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast( discardRectangles.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d) const - { - d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d ) const - { - d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const vk::Rect2D* pExclusiveScissors, Dispatch const &d) const - { - d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast( pExclusiveScissors ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy exclusiveScissors, Dispatch const &d ) const - { - d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast( exclusiveScissors.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const - { - d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const - { - d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); - } 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const - { - d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const - { - d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const vk::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const - { - return static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const vk::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const - { - return static_cast( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const vk::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const - { - return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const vk::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const - { - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const - { - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t 
scissorCount, const vk::Rect2D* pScissors, Dispatch const &d) const - { - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d ) const - { - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast( scissors.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( vk::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const - { - d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( vk::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const - { - d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( vk::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const - { - d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( vk::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const - { - d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( vk::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const - { - d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( vk::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const - { - d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const vk::Viewport* pViewports, Dispatch const &d) const - { - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d ) const - { - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast( viewports.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const - { - d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy shadingRatePalettes, Dispatch const &d ) const - { - d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() 
, reinterpret_cast( shadingRatePalettes.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const - { - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d ) const - { - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast( viewportWScalings.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( vk::Buffer raygenShaderBindingTableBuffer, vk::DeviceSize raygenShaderBindingOffset, vk::Buffer missShaderBindingTableBuffer, vk::DeviceSize missShaderBindingOffset, vk::DeviceSize missShaderBindingStride, vk::Buffer hitShaderBindingTableBuffer, vk::DeviceSize hitShaderBindingOffset, vk::DeviceSize hitShaderBindingStride, vk::Buffer callableShaderBindingTableBuffer, vk::DeviceSize callableShaderBindingOffset, vk::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const - { - d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( vk::Buffer raygenShaderBindingTableBuffer, vk::DeviceSize raygenShaderBindingOffset, vk::Buffer missShaderBindingTableBuffer, vk::DeviceSize missShaderBindingOffset, vk::DeviceSize missShaderBindingStride, vk::Buffer hitShaderBindingTableBuffer, vk::DeviceSize hitShaderBindingOffset, vk::DeviceSize hitShaderBindingStride, vk::Buffer callableShaderBindingTableBuffer, vk::DeviceSize callableShaderBindingOffset, vk::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const - { - d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize dataSize, const void* pData, Dispatch const &d) const - { - d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( dataSize ), pData ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, ArrayProxy data, Dispatch const &d ) const - { - d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const vk::Event* pEvents, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const - { - d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy events, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const - { - d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast( events.data() ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const vk::AccelerationStructureNV* pAccelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const - { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy accelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const - { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( vk::PipelineStageFlagBits pipelineStage, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const - { - d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast( pipelineStage ), static_cast( dstBuffer ), static_cast( dstOffset ), marker ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( vk::PipelineStageFlagBits pipelineStage, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const - { - 
d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( vk::PipelineStageFlagBits pipelineStage, vk::QueryPool queryPool, uint32_t query, Dispatch const &d) const
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( vk::PipelineStageFlagBits pipelineStage, vk::QueryPool queryPool, uint32_t query, Dispatch const &d ) const
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::reset( vk::CommandBufferResetFlags flags, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( vk::CommandBufferResetFlags flags, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-  }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d ) const
-  {
-    Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const vk::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const
-  {
-    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
-  {
-    uint32_t imageIndex;
-    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
-    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", {
Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const - { - return static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, Dispatch const &d ) const - { - uint32_t imageIndex; - Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); - return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const vk::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, vk::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const - { - return static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( pAcquireInfo ), reinterpret_cast( pConfiguration ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const - { - vk::PerformanceConfigurationINTEL configuration; - Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast( &acquireInfo ), reinterpret_cast( &configuration ) ) ); - return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING"::Device::acquirePerformanceConfigurationINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const vk::CommandBufferAllocateInfo* pAllocateInfo, vk::CommandBuffer* pCommandBuffers, Dispatch const &d) const - { - return static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const - { - std::vector commandBuffers( allocateInfo.commandBufferCount ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector commandBuffers( allocateInfo.commandBufferCount, vectorAllocator ); - Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - return createResultValue( result, commandBuffers, 
VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const - { - static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle ), "CommandBuffer is greater than UniqueHandle!" ); - std::vector, Allocator> commandBuffers; - commandBuffers.reserve( allocateInfo.commandBufferCount ); - CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle ) - sizeof( CommandBuffer ) ) ); - Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - PoolFree deleter( *this, allocateInfo.commandPool, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle ), "CommandBuffer is greater than UniqueHandle!" ); - std::vector, Allocator> commandBuffers( vectorAllocator ); - commandBuffers.reserve( allocateInfo.commandBufferCount ); - CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle ) - sizeof( CommandBuffer ) ) ); - Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - PoolFree deleter( *this, allocateInfo.commandPool, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const vk::DescriptorSetAllocateInfo* pAllocateInfo, vk::DescriptorSet* pDescriptorSets, Dispatch const &d) const - { - return static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const - { - std::vector descriptorSets( allocateInfo.descriptorSetCount ); - Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator ); - Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( 
descriptorSets.data() ) ) ); - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const - { - static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle ), "DescriptorSet is greater than UniqueHandle!" ); - std::vector, Allocator> descriptorSets; - descriptorSets.reserve( allocateInfo.descriptorSetCount ); - DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle ) - sizeof( DescriptorSet ) ) ); - Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle ), "DescriptorSet is greater than UniqueHandle!" ); - std::vector, Allocator> descriptorSets( vectorAllocator ); - descriptorSets.reserve( allocateInfo.descriptorSetCount ); - DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle ) - sizeof( DescriptorSet ) ) ); - Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::allocateMemory( const vk::MemoryAllocateInfo* pAllocateInfo, const vk::AllocationCallbacks* pAllocator, vk::DeviceMemory* pMemory, Dispatch const &d) const - { - return static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMemory ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const - { - vk::DeviceMemory memory; - Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); - return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const - { - vk::DeviceMemory memory; - Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( 
&allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); - - ObjectFree deleter( *this, allocator, d ); - return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const vk::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const - { - return static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindAccelerationStructureMemoryNV( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d) const - { - return static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const - { - return static_cast( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const - { - return static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::bindImageMemory( vk::Image 
image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d) const - { - return static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const - { - return static_cast( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const - { - return static_cast( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d ) const - { - Result result = static_cast( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast( bindInfos.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d) const - { - return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const - { - Result result = static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const vk::AccelerationStructureCreateInfoNV* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const - { - return static_cast( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pAccelerationStructure ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::AccelerationStructureNV accelerationStructure; - Result 
result = static_cast( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); - return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::AccelerationStructureNV accelerationStructure; - Result result = static_cast( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &accelerationStructure ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNVUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createBuffer( const vk::BufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Buffer* pBuffer, Dispatch const &d) const - { - return static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Buffer buffer; - Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); - return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Buffer buffer; - Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createBufferView( const vk::BufferViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::BufferView* pView, Dispatch const &d) const - { - return static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::BufferView view; - Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - 
VULKAN_HPP_INLINE typename ResultValueType>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::BufferView view; - Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createCommandPool( const vk::CommandPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::CommandPool* pCommandPool, Dispatch const &d) const - { - return static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::CommandPool commandPool; - Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); - return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::CommandPool commandPool; - Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createComputePipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::ComputePipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d) const - { - return static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional 
allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); - } - template - VULKAN_HPP_INLINE ResultValueType::type Device::createComputePipeline( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); - std::vector, Allocator> pipelines; - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); - std::vector, Allocator> pipelines( vectorAllocator ); - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelineUnique( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createDescriptorPool( const vk::DescriptorPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorPool* pDescriptorPool, Dispatch const &d) const - { - return static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorPool descriptorPool; - Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); - return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorPool descriptorPool; - Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorSetLayout* pSetLayout, Dispatch const &d) const - { - return static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( 
pSetLayout ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorSetLayout setLayout; - Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); - return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorSetLayout setLayout; - Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const - { - return static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const - { - return static_cast( 
d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DescriptorUpdateTemplate descriptorUpdateTemplate; - Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createEvent( const vk::EventCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Event* pEvent, Dispatch const &d) const - { - return static_cast( d.vkCreateEvent( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Event event; - Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Event event; - Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createFence( const vk::FenceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d) const - { - return static_cast( d.vkCreateFence( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type 
Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Fence fence; - Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Fence fence; - Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createFramebuffer( const vk::FramebufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Framebuffer* pFramebuffer, Dispatch const &d) const - { - return static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Framebuffer framebuffer; - Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Framebuffer framebuffer; - Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::GraphicsPipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d) const - { - return static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 
createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); - } - template - VULKAN_HPP_INLINE ResultValueType::type Device::createGraphicsPipeline( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); - std::vector, Allocator> pipelines; - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); - std::vector, Allocator> pipelines( vectorAllocator ); - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelineUnique( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createImage( const vk::ImageCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Image* pImage, Dispatch const &d) const - { - return static_cast( d.vkCreateImage( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Image image; - Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Image image; - Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createImageView( const vk::ImageViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ImageView* pView, Dispatch const &d) const - { - return static_cast( d.vkCreateImageView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - 
vk::ImageView view; - Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::ImageView view; - Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const vk::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const - { - return static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::IndirectCommandsLayoutNVX indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::IndirectCommandsLayoutNVX indirectCommandsLayout; - Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const vk::ObjectTableCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ObjectTableNVX* pObjectTable, Dispatch const &d) const - { - return static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pObjectTable ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::ObjectTableNVX objectTable; - Result result = static_cast( 
d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); - return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::ObjectTableNVX objectTable; - Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createPipelineCache( const vk::PipelineCacheCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineCache* pPipelineCache, Dispatch const &d) const - { - return static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::PipelineCache pipelineCache; - Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::PipelineCache pipelineCache; - Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createPipelineLayout( const vk::PipelineLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineLayout* pPipelineLayout, Dispatch const &d) const - { - return static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::PipelineLayout pipelineLayout; - Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - return createResultValue( result, pipelineLayout, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::PipelineLayout pipelineLayout; - Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createQueryPool( const vk::QueryPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::QueryPool* pQueryPool, Dispatch const &d) const - { - return static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::QueryPool queryPool; - Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::QueryPool queryPool; - Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::RayTracingPipelineCreateInfoNV* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d) const - { - return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size() ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" ); - } - template - 
VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector pipelines( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" ); - } - template - VULKAN_HPP_INLINE ResultValueType::type Device::createRayTracingPipelineNV( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" ); - std::vector, Allocator> pipelines; - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle ), "Pipeline is greater than UniqueHandle!" 
); - std::vector, Allocator> pipelines( vectorAllocator ); - pipelines.reserve( createInfos.size() ); - Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( Pipeline ) ) ); - Result result = static_cast(d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRayTracingPipelineNVUnique( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator, Dispatch const &d ) const - { - Pipeline pipeline; - Result result = static_cast( d.vkCreateRayTracingPipelinesNV( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createRenderPass( const vk::RenderPassCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d) const - { - return static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const vk::RenderPassCreateInfo2KHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d) const - { - return static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_INLINE ResultValueType::type Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::RenderPass renderPass; - Result result = static_cast( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createSampler( const vk::SamplerCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Sampler* pSampler, Dispatch const &d) const - { - return static_cast( d.vkCreateSampler( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Sampler sampler; - Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Sampler sampler; - Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const - { - return static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), 
reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const - { - return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SamplerYcbcrConversion ycbcrConversion; - Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createSemaphore( const vk::SemaphoreCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Semaphore* pSemaphore, Dispatch const &d) const - { - return static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Semaphore semaphore; - Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), 
reinterpret_cast( &semaphore ) ) ); - return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Semaphore semaphore; - Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createShaderModule( const vk::ShaderModuleCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ShaderModule* pShaderModule, Dispatch const &d) const - { - return static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::ShaderModule shaderModule; - Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::ShaderModule shaderModule; - Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const vk::SwapchainCreateInfoKHR* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchains, Dispatch const &d) const - { - return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchains ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - std::vector swapchains( createInfos.size() ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); - } - template - VULKAN_HPP_INLINE typename 
ResultValueType>::type Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector swapchains( createInfos.size(), vectorAllocator ); - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); - } - template - VULKAN_HPP_INLINE ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const - { - static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle ), "SwapchainKHR is greater than UniqueHandle!" ); - std::vector, Allocator> swapchainKHRs; - swapchainKHRs.reserve( createInfos.size() ); - SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( SwapchainKHR ) ) ); - Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const - { - static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle ), "SwapchainKHR is greater than UniqueHandle!" 
); - std::vector, Allocator> swapchainKHRs( vectorAllocator ); - swapchainKHRs.reserve( createInfos.size() ); - SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle ) - sizeof( SwapchainKHR ) ) ); - Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); - if (result == vk::Result::eSuccess) - { - ObjectDestroy deleter( *this, allocator, d ); - for ( size_t i=0 ; i( buffer[i], deleter ) ); - } - } - - return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const vk::SwapchainCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchain, Dispatch const &d) const - { - return static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchain ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SwapchainKHR swapchain; - Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const vk::ValidationCacheCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ValidationCacheEXT* pValidationCache, Dispatch const &d) const - { - return static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pValidationCache ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE 
[Deleted hunk, continued: the remainder of this removal strips the old pregenerated vk::Device wrapper definitions from vulkan.hpp. It covers createValidationCacheEXT (and its Unique variant), debugMarkerSetObjectNameEXT, debugMarkerSetObjectTagEXT, and the destroyXxx()/destroy() overload pairs for AccelerationStructureNV, Buffer, BufferView, CommandPool, DescriptorPool, DescriptorSetLayout, DescriptorUpdateTemplate and DescriptorUpdateTemplateKHR, the Device itself, Event, Fence, Framebuffer, Image, ImageView, IndirectCommandsLayoutNVX, ObjectTableNVX, Pipeline, PipelineCache, PipelineLayout, QueryPool, RenderPass, Sampler, SamplerYcbcrConversion and SamplerYcbcrConversionKHR, Semaphore, ShaderModule, SwapchainKHR, and ValidationCacheEXT. It then removes waitIdle, displayPowerControlEXT, flushMappedMemoryRanges, freeCommandBuffers / freeDescriptorSets / freeMemory together with their free() aliases, getAccelerationStructureHandleNV, getAccelerationStructureMemoryRequirementsNV, getAndroidHardwareBufferPropertiesANDROID, getBufferAddressEXT, getBufferMemoryRequirements / 2 / 2KHR, getCalibratedTimestampsEXT, getDescriptorSetLayoutSupport and its KHR alias, getGroupPeerMemoryFeatures and its KHR alias, getGroupPresentCapabilitiesKHR, getGroupSurfacePresentModes2EXT and getGroupSurfacePresentModesKHR, getMemoryCommitment, getProcAddr, getQueue, getQueue2, getEventStatus, getFenceFdKHR, getFenceStatus, getFenceWin32HandleKHR, getImageDrmFormatModifierPropertiesEXT, getImageMemoryRequirements / 2 / 2KHR, and getImageSparseMemoryRequirements / 2 / 2KHR, the last of which continues below. Each removal takes out the raw-pointer variant together with the enhanced-mode overload guarded by #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE (plus the StructureChain overloads where present), and the Android- and Win32-specific entries remain wrapped in VK_USE_PLATFORM_ANDROID_KHR / VK_USE_PLATFORM_WIN32_KHR as before.]
d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); - return sparseMemoryRequirements; - } - template - VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector sparseMemoryRequirements( vectorAllocator ); - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); - return sparseMemoryRequirements; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( vk::Image image, const vk::ImageSubresource* pSubresource, vk::SubresourceLayout* pLayout, Dispatch const &d) const - { - d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE vk::SubresourceLayout Device::getImageSubresourceLayout( vk::Image image, const ImageSubresource & subresource, Dispatch const &d ) const - { - vk::SubresourceLayout layout; - d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( &subresource ), reinterpret_cast( &layout ) ); - return layout; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const vk::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const - { - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d ) const - { - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const vk::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( pInfo ), pBuffer ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const - { - struct AHardwareBuffer* buffer; - Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); - return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - template - VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const vk::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type 
Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const - { - int fd; - Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); - return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, vk::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const - { - vk::MemoryFdPropertiesKHR memoryFdProperties; - Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); - return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, vk::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( pMemoryHostPointerProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const - { - vk::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; - Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( &memoryHostPointerProperties ) ) ); - return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const vk::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const - { - HANDLE handle; - Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryWin32HandleNV( m_device, 
static_cast( memory ), static_cast( handleType ), pHandle ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const - { - HANDLE handle; - Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); - return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, vk::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const - { - return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( pMemoryWin32HandleProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const - { - vk::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; - Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); - return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, vk::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const - { - return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), pPresentationTimingCount, reinterpret_cast( pPresentationTimings ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d ) const - { - std::vector presentationTimings; - uint32_t presentationTimingCount; - Result result; - do - { - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentationTimingCount ) - { - presentationTimings.resize( presentationTimingCount ); - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); - presentationTimings.resize( presentationTimingCount ); - } - return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector presentationTimings( vectorAllocator ); - uint32_t 
presentationTimingCount; - Result result; - do - { - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentationTimingCount ) - { - presentationTimings.resize( presentationTimingCount ); - result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); - presentationTimings.resize( presentationTimingCount ); - } - return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, vk::PerformanceValueINTEL* pValue, Dispatch const &d) const - { - return static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( pValue ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const - { - vk::PerformanceValueINTEL value; - Result result = static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); - return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getPerformanceParameterINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getPipelineCacheData( vk::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const - { - return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( vk::PipelineCache pipelineCache, Dispatch const &d ) const - { - std::vector data; - size_t dataSize; - Result result; - do - { - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && dataSize ) - { - data.resize( dataSize ); - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - data.resize( dataSize ); - } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( vk::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector data( vectorAllocator ); - size_t dataSize; - Result result; - do - { - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && dataSize ) - { - data.resize( dataSize ); - result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); - } - } while ( result == 
Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - data.resize( dataSize ); - } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, vk::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const - { - return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast( pInternalRepresentations ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const - { - std::vector internalRepresentations; - uint32_t internalRepresentationCount; - Result result; - do - { - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && internalRepresentationCount ) - { - internalRepresentations.resize( internalRepresentationCount ); - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, reinterpret_cast( internalRepresentations.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); - internalRepresentations.resize( internalRepresentationCount ); - } - return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector internalRepresentations( vectorAllocator ); - uint32_t internalRepresentationCount; - Result result; - do - { - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && internalRepresentationCount ) - { - internalRepresentations.resize( internalRepresentationCount ); - result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, reinterpret_cast( internalRepresentations.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); - internalRepresentations.resize( internalRepresentationCount ); - } - return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const vk::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, 
vk::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const - { - return static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( pPipelineInfo ), pExecutableCount, reinterpret_cast( pProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const - { - std::vector properties; - uint32_t executableCount; - Result result; - do - { - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && executableCount ) - { - properties.resize( executableCount ); - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( executableCount <= properties.size() ); - properties.resize( executableCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector properties( vectorAllocator ); - uint32_t executableCount; - Result result; - do - { - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && executableCount ) - { - properties.resize( executableCount ); - result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( executableCount <= properties.size() ); - properties.resize( executableCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, vk::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const - { - return static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( pExecutableInfo ), pStatisticCount, reinterpret_cast( pStatistics ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const - { - std::vector statistics; - uint32_t statisticCount; - Result result; - do - { - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && statisticCount ) - { - statistics.resize( statisticCount ); - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, reinterpret_cast( statistics.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( 
result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); - statistics.resize( statisticCount ); - } - return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector statistics( vectorAllocator ); - uint32_t statisticCount; - Result result; - do - { - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && statisticCount ) - { - statistics.resize( statisticCount ); - result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, reinterpret_cast( statistics.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); - statistics.resize( statisticCount ); - } - return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d) const - { - return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast( stride ), static_cast( flags ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d ) const - { - Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const - { - return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const - { - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, 
vk::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const - { - return static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d ) const - { - vk::RefreshCycleDurationGOOGLE displayTimingProperties; - Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); - return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( vk::RenderPass renderPass, vk::Extent2D* pGranularity, Dispatch const &d) const - { - d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE vk::Extent2D Device::getRenderAreaGranularity( vk::RenderPass renderPass, Dispatch const &d ) const - { - vk::Extent2D granularity; - d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); - return granularity; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const vk::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const - { - return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const - { - int fd; - Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); - return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const vk::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const - { - return static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const - { - HANDLE handle; - Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const - { - return static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), pInfoSize, pInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename 
ResultValueType>::type Device::getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Dispatch const &d ) const - { - std::vector info; - size_t infoSize; - Result result; - do - { - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && infoSize ) - { - info.resize( infoSize ); - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( infoSize <= info.size() ); - info.resize( infoSize ); - } - return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector info( vectorAllocator ); - size_t infoSize; - Result result; - do - { - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && infoSize ) - { - info.resize( infoSize ); - result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( infoSize <= info.size() ); - info.resize( infoSize ); - } - return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const - { - return static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const - { - uint64_t counterValue; - Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); - return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( vk::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, vk::Image* pSwapchainImages, Dispatch const &d) const - { - return static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Dispatch const &d ) const - { - std::vector swapchainImages; - uint32_t swapchainImageCount; - Result result; - do - { - result = static_cast( d.vkGetSwapchainImagesKHR( 
m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && swapchainImageCount ) - { - swapchainImages.resize( swapchainImageCount ); - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); - swapchainImages.resize( swapchainImageCount ); - } - return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector swapchainImages( vectorAllocator ); - uint32_t swapchainImageCount; - Result result; - do - { - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && swapchainImageCount ) - { - swapchainImages.resize( swapchainImageCount ); - result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); - swapchainImages.resize( swapchainImageCount ); - } - return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( vk::SwapchainKHR swapchain, Dispatch const &d) const - { - return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); - } -#else - template - VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( vk::SwapchainKHR swapchain, Dispatch const &d ) const - { - Result result = static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const - { - return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Dispatch const &d ) const - { - std::vector data; - size_t dataSize; - Result result; - do - { - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && dataSize ) - { - data.resize( dataSize ); - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - data.resize( dataSize ); - } - return createResultValue( 
result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector data( vectorAllocator ); - size_t dataSize; - Result result; - do - { - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); - if ( ( result == Result::eSuccess ) && dataSize ) - { - data.resize( dataSize ); - result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - data.resize( dataSize ); - } - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const vk::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const - { - return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const vk::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const - { - return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const vk::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const - { - return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const vk::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, 
Dispatch const &d) const - { - return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const vk::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const - { - return static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const - { - return static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const - { - Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags, void** ppData, Dispatch const &d) const - { - return static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), ppData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags, Dispatch const &d ) const - { - void* pData; - Result result = static_cast( d.vkMapMemory( m_device, static_cast( memory ), static_cast( offset ), static_cast( size ), static_cast( flags ), &pData ) ); - return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::mergePipelineCaches( vk::PipelineCache dstCache, uint32_t srcCacheCount, const vk::PipelineCache* pSrcCaches, Dispatch const &d) const - { - return static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); - } -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::mergePipelineCaches( vk::PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d ) const - { - Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const vk::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const - { - return static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d ) const - { - Result result = static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::registerEventEXT( const vk::DeviceEventInfoEXT* pDeviceEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d) const - { - return static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const - { - vk::Fence fence; - Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( vk::DisplayKHR display, const vk::DisplayEventInfoEXT* pDisplayEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d) const - { - return static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::registerDisplayEventEXT( vk::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const - { - vk::Fence fence; - Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::registerObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectTableEntryNVX* const* ppObjectTableEntries, 
const uint32_t* pObjectIndices, Dispatch const &d) const - { - return static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( ppObjectTableEntries ), pObjectIndices ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::registerObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() ); -#else - if ( pObjectTableEntries.size() != objectIndices.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), pObjectTableEntries.size() , reinterpret_cast( pObjectTableEntries.data() ), objectIndices.data() ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d) const - { - return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d) const - { - return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d) const - { - return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d ) const - { - Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_INLINE Result Device::resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags, Dispatch const &d) const - { - return static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags, Dispatch const &d ) const - { - Result result = static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetEvent( vk::Event event, Dispatch const &d) const - { - return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::resetEvent( vk::Event event, Dispatch const &d ) const - { - Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const vk::Fence* pFences, Dispatch const &d) const - { - return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::resetFences( ArrayProxy fences, Dispatch const &d ) const - { - Result result = static_cast( d.vkResetFences( m_device, fences.size() , reinterpret_cast( fences.data() ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const - { - d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#else - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const - { - d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const vk::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const - { - return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const vk::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const - { - return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); - } -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::setEvent( vk::Event event, Dispatch const &d) const - { - return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Device::setEvent( vk::Event event, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const vk::SwapchainKHR* pSwapchains, const vk::HdrMetadataEXT* pMetadata, Dispatch const &d) const - { - d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); -#else - if ( swapchains.size() != metadata.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( vk::SwapchainKHR swapChain, vk::Bool32 localDimmingEnable, Dispatch const &d) const - { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( vk::SwapchainKHR swapChain, vk::Bool32 localDimmingEnable, Dispatch const &d ) const - { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPool( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d) const - { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::trimCommandPool( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d ) const - { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d) const - { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( 
vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d ) const - { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d) const - { - d.vkUninitializePerformanceApiINTEL( m_device ); - } -#else - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d ) const - { - d.vkUninitializePerformanceApiINTEL( m_device ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::unmapMemory( vk::DeviceMemory memory, Dispatch const &d) const - { - d.vkUnmapMemory( m_device, static_cast( memory ) ); - } -#else - template - VULKAN_HPP_INLINE void Device::unmapMemory( vk::DeviceMemory memory, Dispatch const &d ) const - { - d.vkUnmapMemory( m_device, static_cast( memory ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const - { - return static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( pObjectEntryTypes ), pObjectIndices ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Device::unregisterObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() ); -#else - if ( objectEntryTypes.size() != objectIndices.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - Result result = static_cast( d.vkUnregisterObjectsNVX( m_device, static_cast( objectTable ), objectEntryTypes.size() , reinterpret_cast( objectEntryTypes.data() ), objectIndices.data() ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const - { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - } -#else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const - { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const - { - d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( 
descriptorUpdateTemplate ), pData ); - } -#else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const - { - d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const vk::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const - { - d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast( pDescriptorCopies ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d ) const - { - d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast( descriptorCopies.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const vk::Fence* pFences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const - { - return static_cast( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy fences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const - { - Result result = static_cast( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template - VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const vk::AndroidSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); 
- - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - template - VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const vk::DebugReportCallbackCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugReportCallbackEXT* pCallback, Dispatch const &d) const - { - return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCallback ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DebugReportCallbackEXT callback; - Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DebugReportCallbackEXT callback; - Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const vk::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const - { - return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMessenger ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DebugUtilsMessengerEXT messenger; - Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); - return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DebugUtilsMessengerEXT messenger; - Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( 
&messenger ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const vk::DisplaySurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const vk::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, 
VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_IOS_MVK - template - VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const vk::IOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#ifdef VK_USE_PLATFORM_FUCHSIA - template - VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const vk::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); - } -#endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#ifdef VK_USE_PLATFORM_MACOS_MVK - template - VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const vk::MacOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - -#ifdef VK_USE_PLATFORM_METAL_EXT - template - VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const vk::MetalSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXT" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXTUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#ifdef 
VK_USE_PLATFORM_GGP - template - VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const vk::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGP" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_GGP*/ - -#ifdef VK_USE_PLATFORM_VI_NN - template - VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const vk::ViSurfaceCreateInfoNN* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_VI_NN*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - VULKAN_HPP_INLINE Result 
Instance::createWaylandSurfaceKHR( const vk::WaylandSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const vk::Win32SurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#ifdef VK_USE_PLATFORM_XCB_KHR - template - VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const vk::XcbSurfaceCreateInfoKHR* pCreateInfo, const 
vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const vk::XlibSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const - { - return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::SurfaceKHR surface; - Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - - ObjectDestroy deleter( *this, allocator, d ); - return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - - template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const - { - d.vkDebugReportMessageEXT( m_instance, 
static_cast( flags ), static_cast( objectType ), object, location, messageCode, pLayerPrefix, pMessage ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() ); -#else - if ( layerPrefix.size() != message.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const - { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroy( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( vk::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const - { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const - { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroy( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( vk::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const - { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); - } 
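
Every wrapper removed in this hunk has the same two-layer shape: a thin overload that forwards raw pointers straight to the dispatched C entry point, and an "enhanced" overload that owns the temporaries and turns the VkResult into a value-or-throw return. A minimal standalone sketch of that shape follows; the names (Api, Widget, rawCreateWidget, ...) are hypothetical stand-ins, not the real vulkan.hpp declarations.

// Sketch only: illustrates the two-layer wrapper pattern seen in the removed
// code above. All names here are hypothetical.
#include <cstdint>
#include <stdexcept>

enum class Result : std::int32_t { eSuccess = 0, eErrorUnknown = -13 };

struct WidgetCreateInfo { int size = 0; };
struct Widget { std::uint64_t handle = 0; };

// Stand-in for the dispatched C entry point (d.vkCreateXxx in the real header).
inline Result rawCreateWidget(const WidgetCreateInfo* info, Widget* out)
{
  if (!info || !out)
    return Result::eErrorUnknown;
  out->handle = 1;  // pretend the driver produced a handle
  return Result::eSuccess;
}

class Api
{
public:
  // Layer 1: raw overload, mirrors the C signature and just forwards pointers.
  Result createWidget(const WidgetCreateInfo* pCreateInfo, Widget* pWidget) const
  {
    return rawCreateWidget(pCreateInfo, pWidget);
  }

  // Layer 2: "enhanced" overload, owns the output object and throws on failure
  // (the real header funnels this step through createResultValue instead).
  Widget createWidget(const WidgetCreateInfo& createInfo) const
  {
    Widget widget;
    const Result result = rawCreateWidget(&createInfo, &widget);
    if (result != Result::eSuccess)
      throw std::runtime_error("Api::createWidget failed");
    return widget;
  }
};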
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroy( const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const &d ) const - { - d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( vk::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const - { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::destroy( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const - { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( vk::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const - { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const - { - return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const - { - std::vector physicalDeviceGroupProperties; - uint32_t physicalDeviceGroupCount; - Result result; - do - { - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector physicalDeviceGroupProperties( vectorAllocator ); - uint32_t physicalDeviceGroupCount; - Result result; - do - { - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, 
&physicalDeviceGroupCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const - { - return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const - { - std::vector physicalDeviceGroupProperties; - uint32_t physicalDeviceGroupCount; - Result result; - do - { - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector physicalDeviceGroupProperties( vectorAllocator ); - uint32_t physicalDeviceGroupCount; - Result result; - do - { - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - } - return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, vk::PhysicalDevice* 
pPhysicalDevices, Dispatch const &d) const - { - return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const - { - std::vector physicalDevices; - uint32_t physicalDeviceCount; - Result result; - do - { - result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && physicalDeviceCount ) - { - physicalDevices.resize( physicalDeviceCount ); - result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); - physicalDevices.resize( physicalDeviceCount ); - } - return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector physicalDevices( vectorAllocator ); - uint32_t physicalDeviceCount; - Result result; - do - { - result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && physicalDeviceCount ) - { - physicalDevices.resize( physicalDeviceCount ); - result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); - physicalDevices.resize( physicalDeviceCount ); - } - return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const - { - return d.vkGetInstanceProcAddr( m_instance, pName ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const - { - return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const vk::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const - { - d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( pCallbackData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const - { - d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( &callbackData ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef 
VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, vk::DisplayKHR display, Dispatch const &d) const - { - return static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::acquireXlibDisplayEXT( vk::DisplayKHR display, Dispatch const &d ) const - { - Display dpy; - Result result = static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); - return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const vk::DeviceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Device* pDevice, Dispatch const &d) const - { - return static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Device device; - Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); - return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" ); - } -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::Device device; - Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); - - ObjectDestroy deleter( allocator, d ); - return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter ); - } -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( vk::DisplayKHR display, const vk::DisplayModeCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DisplayModeKHR* pMode, Dispatch const &d) const - { - return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMode ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDisplayModeKHR( vk::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const - { - vk::DisplayModeKHR mode; - Result result = static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); - return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* 
pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d) const - { - return static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const - { - std::vector properties; - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - properties.resize( propertyCount ); - } - return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector properties( vectorAllocator ); - uint32_t propertyCount; - Result result; - do - { - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
-      }
-    } while ( result == Result::eIncomplete );
-    if ( result == Result::eSuccess )
-    {
-      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
-      properties.resize( propertyCount );
-    }
-    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
[removed hunk continues with the remaining vk::PhysicalDevice wrapper implementations deleted from vulkan.hpp: enumerateDeviceLayerProperties, getDisplayModeProperties2KHR/KHR, getDisplayPlaneCapabilities2KHR/KHR, getDisplayPlaneSupportedDisplaysKHR, getCalibrateableTimeDomainsEXT, getCooperativeMatrixPropertiesNV, getDisplayPlaneProperties2KHR/KHR, getDisplayProperties2KHR/KHR, getExternalBufferProperties(/KHR), getExternalFenceProperties(/KHR), getExternalImageFormatPropertiesNV, getExternalSemaphoreProperties(/KHR), getFeatures(/2/2KHR), getFormatProperties(/2/2KHR), getGeneratedCommandsPropertiesNVX, getImageFormatProperties(/2/2KHR), getMemoryProperties(/2/2KHR), getMultisamplePropertiesEXT, getPresentRectanglesKHR, getProperties(/2/2KHR), getQueueFamilyProperties(/2/2KHR), getSparseImageFormatProperties(/2/2KHR), getSupportedFramebufferMixedSamplesCombinationsNV, getSurfaceCapabilities2EXT, getSurfaceCapabilities2KHR, and getSurfaceCapabilitiesKHR; each consists of a pointer-based overload plus enhanced overloads guarded by #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE]
-
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, vk::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const
-  {
-    return
static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector surfaceFormats( vectorAllocator ); - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( vk::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, vk::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( vk::SurfaceKHR surface, Dispatch const &d ) const - { - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - 
if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector surfaceFormats( vectorAllocator ); - uint32_t surfaceFormatCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const - { - std::vector presentModes; - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector presentModes( vectorAllocator ); - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( 
d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( vk::SurfaceKHR surface, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d ) const - { - std::vector presentModes; - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); - } - template - VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector presentModes( vectorAllocator ); - uint32_t presentModeCount; - Result result; - do - { - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); - if ( ( result == Result::eSuccess ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); - } - } while ( result == Result::eIncomplete ); - if ( result == Result::eSuccess ) - { - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - presentModes.resize( presentModeCount ); - } - return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, vk::Bool32* pSupported, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type 
PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, Dispatch const &d ) const - { - vk::Bool32 supported; - Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); - return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const - { - return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#ifdef VK_USE_PLATFORM_WIN32_KHR -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); - } -#else - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const - { - return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( + vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + 
vkBuildAccelerationStructuresKHR = + PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = + PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = + 
PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = + PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = + PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = + PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( 
device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = + PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = + 
PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const - { - return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_KHR - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const - { - return static_cast( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const - { - return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - template - VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, vk::DisplayKHR* pDisplay, Dispatch const &d) const - { - return static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const - { - vk::DisplayKHR display; - Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); - return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif 
/*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d) const - { - return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d ) const - { - Result result = static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, vk::CheckpointDataNV* pCheckpointData, Dispatch const &d) const - { - d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Dispatch const &d ) const - { - std::vector checkpointData; - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return checkpointData; - } - template - VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const - { - std::vector checkpointData( vectorAllocator ); - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return checkpointData; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const - { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const - { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const vk::BindSparseInfo* pBindInfo, vk::Fence fence, Dispatch const &d) const - { - return static_cast( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Queue::bindSparse( ArrayProxy bindInfo, vk::Fence fence, Dispatch const &d ) const - { - Result result = static_cast( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const - { - d.vkQueueEndDebugUtilsLabelEXT( m_queue ); - } -#else - template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const - { - 
d.vkQueueEndDebugUtilsLabelEXT( m_queue ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const - { - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const - { - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Queue::presentKHR( const vk::PresentInfoKHR* pPresentInfo, Dispatch const &d) const - { - return static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d) const - { - return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Queue::setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const - { - Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const vk::SubmitInfo* pSubmits, vk::Fence fence, Dispatch const &d) const - { - return static_cast( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE ResultValueType::type Queue::submit( ArrayProxy submits, vk::Fence fence, Dispatch const &d ) const - { - Result result = static_cast( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const - { - return static_cast( d.vkQueueWaitIdle( m_queue ) ); - } -#else - template - VULKAN_HPP_INLINE ResultValueType::type Queue::waitIdle(Dispatch const &d ) const - { - Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif 
/*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - 
template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ 
enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct 
isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - 
template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_GGP - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_GGP*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - template <> struct isStructureChainValid{ enum { value = true }; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> struct isStructureChainValid{ enum { value = true }; }; - template <> struct isStructureChainValid{ enum { value = true }; }; - - class DispatchLoaderDynamic - { - public: - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed 
vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCmdWriteBufferMarkerAMD 
vkCmdWriteBufferMarkerAMD = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0; - PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR 
vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0; - PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - 
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - 
PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkWaitForFences vkWaitForFences = 0; -#ifdef VK_USE_PLATFORM_ANDROID_KHR - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; -#ifdef VK_USE_PLATFORM_IOS_MVK - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ -#ifdef VK_USE_PLATFORM_FUCHSIA - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = + PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = + PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + 
vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
+ vkCmdSetViewportWithCountEXT =
+ PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+ if ( !vkCmdSetViewportWithCount )
+ vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
+ vkCmdSetScissorWithCountEXT =
+ PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+ if ( !vkCmdSetScissorWithCount )
+ vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
+ vkCmdBindVertexBuffers2EXT =
+ PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+ if ( !vkCmdBindVertexBuffers2 )
+ vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
+ vkCmdSetDepthTestEnableEXT =
+ PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+ if ( !vkCmdSetDepthTestEnable )
+ vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
+ vkCmdSetDepthWriteEnableEXT =
+ PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+ if ( !vkCmdSetDepthWriteEnable )
+ vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
+ vkCmdSetDepthCompareOpEXT =
+ PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+ if ( !vkCmdSetDepthCompareOp )
+ vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
+ vkCmdSetDepthBoundsTestEnableEXT =
+ PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+ if ( !vkCmdSetDepthBoundsTestEnable )
+ vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
+ vkCmdSetStencilTestEnableEXT =
+ PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+ if ( !vkCmdSetStencilTestEnable )
+ vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
+ vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
+ if ( !vkCmdSetStencilOp )
+ vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+
+ //=== VK_KHR_deferred_host_operations ===
+ vkCreateDeferredOperationKHR =
+ PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+ vkDestroyDeferredOperationKHR =
+ PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+ vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
+ vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+ vkGetDeferredOperationResultKHR =
+ PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+ vkDeferredOperationJoinKHR =
+ PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+
+ //=== VK_KHR_pipeline_executable_properties ===
+ vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
+ vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+ vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
+ vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+ vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
+ vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+
+ //=== VK_NV_device_generated_commands ===
+ vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
+ vkGetDeviceProcAddr( device,
"vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = + PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = + PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = + PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = + PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + vkCmdWriteBufferMarker2AMD = + PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + vkGetQueueCheckpointData2NV = + PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = + PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( 
!vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = + PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = + PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( + vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = + PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( + vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ -#ifdef VK_USE_PLATFORM_MACOS_MVK - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ -#ifdef VK_USE_PLATFORM_METAL_EXT - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#ifdef VK_USE_PLATFORM_GGP - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -#endif /*VK_USE_PLATFORM_GGP*/ -#ifdef VK_USE_PLATFORM_VI_NN - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -#endif /*VK_USE_PLATFORM_VI_NN*/ -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - PFN_vkCreateXcbSurfaceKHR 
vkCreateXcbSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0; - 
PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT 
vkQueueBeginDebugUtilsLabelEXT = 0;
- PFN_vkQueueBindSparse vkQueueBindSparse = 0;
- PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
- PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
- PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
- PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
- PFN_vkQueueSubmit vkQueueSubmit = 0;
- PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
-
- public:
- DispatchLoaderDynamic() = default;
-
-#if !defined(VK_NO_PROTOTYPES)
- // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
- DispatchLoaderDynamic(vk::Instance const& instance, vk::Device const& device = {})
- {
- init(instance, device);
- }
-
- // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
- void init(vk::Instance const& instance, vk::Device const& device = {})
- {
- init(static_cast<VkInstance>(instance), ::vkGetInstanceProcAddr, static_cast<VkDevice>(device), device ? ::vkGetDeviceProcAddr : nullptr);
- }
-#endif // !defined(VK_NO_PROTOTYPES)
- // This interface does not require a linked vulkan library.
- DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr )
- {
- init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
- }
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_external_semaphore ===
+ vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
+ vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+ vkGetSemaphoreZirconHandleFUCHSIA =
+ PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
- // This interface does not require a linked vulkan library.
- void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr )
- {
- VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
- VULKAN_HPP_ASSERT(!!device == !!getDeviceProcAddr);
- vkGetInstanceProcAddr = getInstanceProcAddr;
- vkGetDeviceProcAddr = getDeviceProcAddr ? getDeviceProcAddr : PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr") );
- vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( instance, "vkCreateInstance" ) );
- vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateInstanceExtensionProperties" ) );
- vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateInstanceLayerProperties" ) );
- vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( instance, "vkEnumerateInstanceVersion" ) );
- vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( device ? vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) : vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
- vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( device ? vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
- vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( device ?
vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( device ? vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( device ? vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( device ? vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( device ? vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( device ? vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( device ? vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) : vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( device ? vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) : vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( device ? vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) : vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( device ? vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) : vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( device ? vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( device ? vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) : vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( device ? vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) : vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( device ? vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) : vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( device ? vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) : vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( device ? vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) : vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( device ? vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) : vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( device ? vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) : vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( device ? 
vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) : vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( device ? vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) : vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( device ? vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) : vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( device ? vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) : vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( device ? vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) : vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( device ? vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( device ? vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( device ? vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( device ? vkGetDeviceProcAddr( device, "vkCmdDispatch" ) : vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( device ? vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) : vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( device ? vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) : vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( device ? vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) : vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdDraw = PFN_vkCmdDraw( device ? vkGetDeviceProcAddr( device, "vkCmdDraw" ) : vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( device ? 
vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( device ? vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( device ? vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( device ? vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( device ? vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) : vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( device ? vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( device ? vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( device ? vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) : vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( device ? vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( device ? vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) : vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( device ? vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) : vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( device ? vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( device ? vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) : vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( device ? vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) : vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( device ? vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( device ? vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) : vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( device ? vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) : vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( device ? vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) : vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( device ? 
vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) : vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( device ? vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) : vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( device ? vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) : vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( device ? vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) : vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( device ? vkGetDeviceProcAddr( device, "vkCmdReserveSpaceForCommandsNVX" ) : vkGetInstanceProcAddr( instance, "vkCmdReserveSpaceForCommandsNVX" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( device ? vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) : vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( device ? vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) : vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( device ? vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) : vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( device ? vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) : vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( device ? vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) : vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( device ? vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) : vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( device ? vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) : vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( device ? vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) : vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( device ? vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) : vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( device ? vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) : vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( device ? vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( device ? vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) : vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( device ? vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) : vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( device ? vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( device ? 
vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) : vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( device ? vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) : vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( device ? vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) : vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( device ? vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) : vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( device ? vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) : vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( device ? vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) : vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( device ? vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) : vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( device ? vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) : vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( device ? vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) : vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( device ? vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) : vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( device ? vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) : vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( device ? vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) : vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( device ? vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) : vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( device ? vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) : vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( device ? vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) : vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( device ? vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) : vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( device ? vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) : vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( device ? vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) : vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( device ? 
vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) : vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( device ? vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) : vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( device ? vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) : vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( device ? vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) : vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( device ? vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) : vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); - vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( device ? vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) : vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( device ? vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) : vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( device ? vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) : vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( device ? vkGetDeviceProcAddr( device, "vkAllocateMemory" ) : vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( device ? vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) : vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( device ? vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) : vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( device ? vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) : vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( device ? vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) : vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( device ? vkGetDeviceProcAddr( device, "vkBindImageMemory" ) : vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( device ? vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) : vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( device ? vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) : vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( device ? vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) : vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( device ? vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) : vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( device ? 
vkGetDeviceProcAddr( device, "vkCreateBuffer" ) : vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( device ? vkGetDeviceProcAddr( device, "vkCreateBufferView" ) : vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( device ? vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) : vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( device ? vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) : vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( device ? vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) : vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( device ? vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) : vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( device ? vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) : vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( device ? vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) : vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); - vkCreateEvent = PFN_vkCreateEvent( device ? vkGetDeviceProcAddr( device, "vkCreateEvent" ) : vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); - vkCreateFence = PFN_vkCreateFence( device ? vkGetDeviceProcAddr( device, "vkCreateFence" ) : vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( device ? vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) : vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( device ? vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) : vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); - vkCreateImage = PFN_vkCreateImage( device ? vkGetDeviceProcAddr( device, "vkCreateImage" ) : vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); - vkCreateImageView = PFN_vkCreateImageView( device ? vkGetDeviceProcAddr( device, "vkCreateImageView" ) : vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( device ? vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNVX" ) : vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNVX" ) ); - vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( device ? vkGetDeviceProcAddr( device, "vkCreateObjectTableNVX" ) : vkGetInstanceProcAddr( instance, "vkCreateObjectTableNVX" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( device ? vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) : vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( device ? vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) : vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( device ? vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) : vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( device ? 
vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) : vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( device ? vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) : vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( device ? vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) : vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); - vkCreateSampler = PFN_vkCreateSampler( device ? vkGetDeviceProcAddr( device, "vkCreateSampler" ) : vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( device ? vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) : vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( device ? vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) : vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( device ? vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) : vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( device ? vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) : vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( device ? vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) : vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( device ? vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) : vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( device ? vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) : vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( device ? vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) : vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( device ? vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) : vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( device ? vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) : vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( device ? vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) : vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( device ? vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) : vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( device ? vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) : vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( device ? vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) : vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( device ? 
vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) : vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( device ? vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) : vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( device ? vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) : vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( device ? vkGetDeviceProcAddr( device, "vkDestroyDevice" ) : vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( device ? vkGetDeviceProcAddr( device, "vkDestroyEvent" ) : vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); - vkDestroyFence = PFN_vkDestroyFence( device ? vkGetDeviceProcAddr( device, "vkDestroyFence" ) : vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( device ? vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) : vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); - vkDestroyImage = PFN_vkDestroyImage( device ? vkGetDeviceProcAddr( device, "vkDestroyImage" ) : vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( device ? vkGetDeviceProcAddr( device, "vkDestroyImageView" ) : vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( device ? vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNVX" ) : vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNVX" ) ); - vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( device ? vkGetDeviceProcAddr( device, "vkDestroyObjectTableNVX" ) : vkGetInstanceProcAddr( instance, "vkDestroyObjectTableNVX" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( device ? vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) : vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( device ? vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) : vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( device ? vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) : vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( device ? vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) : vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( device ? vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) : vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); - vkDestroySampler = PFN_vkDestroySampler( device ? vkGetDeviceProcAddr( device, "vkDestroySampler" ) : vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( device ? vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) : vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( device ? vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) : vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( device ? 
vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) : vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( device ? vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) : vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( device ? vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) : vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( device ? vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) : vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( device ? vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) : vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( device ? vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) : vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( device ? vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) : vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( device ? vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) : vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( device ? vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) : vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); - vkFreeMemory = PFN_vkFreeMemory( device ? vkGetDeviceProcAddr( device, "vkFreeMemory" ) : vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( device ? vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) : vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( device ? vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) : vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); -#ifdef VK_USE_PLATFORM_ANDROID_KHR - vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( device ? vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) : vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( device ? vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) : vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( device ? vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) : vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( device ? vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) : vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( device ? vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) : vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( device ? 
vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) : vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( device ? vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) : vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( device ? vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) : vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( device ? vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( device ? vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( device ? vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( device ? vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( device ? vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( device ? vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( device ? vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( device ? vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) : vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( device ? vkGetDeviceProcAddr( device, "vkGetEventStatus" ) : vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( device ? vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) : vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( device ? vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) : vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( device ? vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) : vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( device ? vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) : vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( device ? 
vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) : vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( device ? vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) : vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( device ? vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) : vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( device ? vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) : vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( device ? vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) : vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( device ? vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) : vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( device ? vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) : vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( device ? vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) : vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); -#ifdef VK_USE_PLATFORM_ANDROID_KHR - vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( device ? vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( device ? vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( device ? vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( device ? vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( device ? vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( device ? vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( device ? vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( device ? 
vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) : vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( device ? vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) : vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( device ? vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) : vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( device ? vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) : vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( device ? vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( device ? vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) : vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( device ? vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) : vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( device ? vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) : vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( device ? vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) : vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( device ? vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) : vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( device ? vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) : vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( device ? vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) : vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( device ? vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) : vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( device ? vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) : vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( device ? vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) : vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( device ? vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) : vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( device ? 
vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) : vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( device ? vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) : vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( device ? vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) : vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( device ? vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) : vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( device ? vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) : vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( device ? vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) : vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( device ? vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) : vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); - vkMapMemory = PFN_vkMapMemory( device ? vkGetDeviceProcAddr( device, "vkMapMemory" ) : vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( device ? vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) : vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( device ? vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) : vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( device ? vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) : vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( device ? vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) : vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( device ? vkGetDeviceProcAddr( device, "vkRegisterObjectsNVX" ) : vkGetInstanceProcAddr( instance, "vkRegisterObjectsNVX" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( device ? vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) : vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( device ? vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) : vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( device ? vkGetDeviceProcAddr( device, "vkResetCommandPool" ) : vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( device ? vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) : vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkResetEvent = PFN_vkResetEvent( device ? 
vkGetDeviceProcAddr( device, "vkResetEvent" ) : vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkResetFences = PFN_vkResetFences( device ? vkGetDeviceProcAddr( device, "vkResetFences" ) : vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( device ? vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) : vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( device ? vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) : vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( device ? vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) : vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkSetEvent = PFN_vkSetEvent( device ? vkGetDeviceProcAddr( device, "vkSetEvent" ) : vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( device ? vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) : vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( device ? vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) : vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( device ? vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) : vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( device ? vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) : vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( device ? vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) : vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( device ? vkGetDeviceProcAddr( device, "vkUnmapMemory" ) : vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); - vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( device ? vkGetDeviceProcAddr( device, "vkUnregisterObjectsNVX" ) : vkGetInstanceProcAddr( instance, "vkUnregisterObjectsNVX" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( device ? vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) : vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( device ? vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) : vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( device ? vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) : vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkWaitForFences = PFN_vkWaitForFences( device ? 
vkGetDeviceProcAddr( device, "vkWaitForFences" ) : vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
- vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
-#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
- vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
- vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
- vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
-#ifdef VK_USE_PLATFORM_IOS_MVK
- vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
-#endif /*VK_USE_PLATFORM_IOS_MVK*/
-#ifdef VK_USE_PLATFORM_FUCHSIA
- vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ //=== VK_FUCHSIA_buffer_collection ===
+ vkCreateBufferCollectionFUCHSIA =
+ PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
+ vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA(
+ vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+ vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA(
+ vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+ vkDestroyBufferCollectionFUCHSIA =
+ PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
+ vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA(
+ vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
-#ifdef VK_USE_PLATFORM_MACOS_MVK
- vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
-#endif /*VK_USE_PLATFORM_MACOS_MVK*/
-#ifdef VK_USE_PLATFORM_METAL_EXT
- vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
-#endif /*VK_USE_PLATFORM_METAL_EXT*/
-#ifdef VK_USE_PLATFORM_GGP
- vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
-#endif /*VK_USE_PLATFORM_GGP*/
-#ifdef VK_USE_PLATFORM_VI_NN
- vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
-#endif /*VK_USE_PLATFORM_VI_NN*/
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
-#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
-#ifdef VK_USE_PLATFORM_WIN32_KHR
- vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
-#ifdef VK_USE_PLATFORM_XCB_KHR
- vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
-#endif /*VK_USE_PLATFORM_XCB_KHR*/
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-
vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); - vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX" ) ); - vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkGetPhysicalDeviceMemoryProperties2 = 
PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); - vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); - vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); - vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - 
vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); -#ifdef VK_USE_PLATFORM_WAYLAND_KHR - vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -#ifdef VK_USE_PLATFORM_WIN32_KHR - vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VK_USE_PLATFORM_XCB_KHR - vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); -#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_KHR - vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ -#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( device ? vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) : vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( device ? vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) : vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( device ? vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) : vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( device ? vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) : vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( device ? vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) : vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( device ? vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) : vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); - vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( device ? vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) : vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( device ? vkGetDeviceProcAddr( device, "vkQueueSubmit" ) : vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( device ? 
vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) : vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
- }
- };
-} // namespace VULKAN_HPP_NAMESPACE
+
+ //=== VK_HUAWEI_subpass_shading ===
+ vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+ vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+ vkCmdSubpassShadingHUAWEI =
+ PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
+
+ //=== VK_HUAWEI_invocation_mask ===
+ vkCmdBindInvocationMaskHUAWEI =
+ PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
+
+ //=== VK_NV_external_memory_rdma ===
+ vkGetMemoryRemoteAddressNV =
+ PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+
+ //=== VK_EXT_extended_dynamic_state2 ===
+ vkCmdSetPatchControlPointsEXT =
+ PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
+ vkCmdSetRasterizerDiscardEnableEXT =
+ PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+ if ( !vkCmdSetRasterizerDiscardEnable )
+ vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
+ vkCmdSetDepthBiasEnableEXT =
+ PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
+ if ( !vkCmdSetDepthBiasEnable )
+ vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
+ vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
+ vkCmdSetPrimitiveRestartEnableEXT =
+ PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+ if ( !vkCmdSetPrimitiveRestartEnable )
+ vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+
+ //=== VK_EXT_color_write_enable ===
+ vkCmdSetColorWriteEnableEXT =
+ PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+
+ //=== VK_EXT_multi_draw ===
+ vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
+ vkCmdDrawMultiIndexedEXT =
+ PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+
+ //=== VK_EXT_pageable_device_local_memory ===
+ vkSetDeviceMemoryPriorityEXT =
+ PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+
+ //=== VK_KHR_maintenance4 ===
+ vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR(
+ vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+ if ( !vkGetDeviceBufferMemoryRequirements )
+ vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+ vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR(
+ vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+ if ( !vkGetDeviceImageMemoryRequirements )
+ vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+ vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR(
+ vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+ if ( !vkGetDeviceImageSparseMemoryRequirements )
+ vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+ }
+ };
+} // namespace VULKAN_HPP_NAMESPACE
 #endif
diff --git a/Externals/Vulkan/Include/vulkan/vulkan_android.h b/Externals/Vulkan/Include/vulkan/vulkan_android.h
index 9b8d3e276f80..de79d382f2f7 100644
--- a/Externals/Vulkan/Include/vulkan/vulkan_android.h
+++ b/Externals/Vulkan/Include/vulkan/vulkan_android.h
@@ -2,19 +2,9 @@
 #define VULKAN_ANDROID_H_ 1
 /*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright 2015-2022 The Khronos Group Inc.
 **
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
 */
 /*
@@ -54,7 +44,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
 #define VK_ANDROID_external_memory_android_hardware_buffer 1
 struct AHardwareBuffer;
-#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 3
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 4
 #define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer"
 typedef struct VkAndroidHardwareBufferUsageANDROID {
 VkStructureType sType;
@@ -100,6 +90,19 @@ typedef struct VkExternalFormatANDROID {
 uint64_t externalFormat;
 } VkExternalFormatANDROID;
+typedef struct VkAndroidHardwareBufferFormatProperties2ANDROID {
+ VkStructureType sType;
+ void* pNext;
+ VkFormat format;
+ uint64_t externalFormat;
+ VkFormatFeatureFlags2 formatFeatures;
+ VkComponentMapping samplerYcbcrConversionComponents;
+ VkSamplerYcbcrModelConversion suggestedYcbcrModel;
+ VkSamplerYcbcrRange suggestedYcbcrRange;
+ VkChromaLocation suggestedXChromaOffset;
+ VkChromaLocation suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatProperties2ANDROID;
+
 typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties);
 typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer);
diff --git a/Externals/Vulkan/Include/vulkan/vulkan_beta.h b/Externals/Vulkan/Include/vulkan/vulkan_beta.h
new file mode 100644
index 000000000000..7fa0d065bc51
--- /dev/null
+++ b/Externals/Vulkan/Include/vulkan/vulkan_beta.h
@@ -0,0 +1,933 @@
+#ifndef VULKAN_BETA_H_
+#define VULKAN_BETA_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_video_queue 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR)
+#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 2
+#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue"
+
+typedef enum VkQueryResultStatusKHR {
+ VK_QUERY_RESULT_STATUS_ERROR_KHR = -1,
+ VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0,
+ VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1,
+ VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkQueryResultStatusKHR;
+
+typedef enum VkVideoCodecOperationFlagBitsKHR {
+ VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR = 0,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT = 0x00020000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT = 0x00000001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+ VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT = 0x00000002,
+#endif
+ VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodecOperationFlagBitsKHR;
+typedef VkFlags VkVideoCodecOperationFlagsKHR;
+
+typedef enum VkVideoChromaSubsamplingFlagBitsKHR {
+ VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR = 0,
+ VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001,
+ VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002,
+ VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004,
+ VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008,
+ VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoChromaSubsamplingFlagBitsKHR;
+typedef VkFlags VkVideoChromaSubsamplingFlagsKHR;
+
+typedef enum VkVideoComponentBitDepthFlagBitsKHR {
+ VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0,
+ VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001,
+ VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004,
+ VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010,
+ VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoComponentBitDepthFlagBitsKHR;
+typedef VkFlags VkVideoComponentBitDepthFlagsKHR;
+
+typedef enum VkVideoCapabilityFlagBitsKHR {
+ VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+ VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002,
+ VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoCapabilityFlagsKHR;
+
+typedef enum VkVideoSessionCreateFlagBitsKHR {
+ VK_VIDEO_SESSION_CREATE_DEFAULT_KHR = 0,
+ VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+ VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoSessionCreateFlagBitsKHR;
+typedef VkFlags VkVideoSessionCreateFlagsKHR;
+typedef VkFlags VkVideoBeginCodingFlagsKHR;
+typedef VkFlags VkVideoEndCodingFlagsKHR;
+
+typedef enum VkVideoCodingControlFlagBitsKHR {
+ VK_VIDEO_CODING_CONTROL_DEFAULT_KHR = 0,
+ VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001,
+ VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodingControlFlagBitsKHR;
+typedef VkFlags VkVideoCodingControlFlagsKHR;
+
+typedef enum VkVideoCodingQualityPresetFlagBitsKHR {
+ VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR = 0x00000001,
+ VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR = 0x00000002,
+ VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR = 0x00000004,
+ VK_VIDEO_CODING_QUALITY_PRESET_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodingQualityPresetFlagBitsKHR;
+typedef VkFlags VkVideoCodingQualityPresetFlagsKHR;
+typedef
struct VkQueueFamilyQueryResultStatusProperties2KHR { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkQueueFamilyQueryResultStatusProperties2KHR; + +typedef struct VkVideoQueueFamilyProperties2KHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagsKHR videoCodecOperations; +} VkVideoQueueFamilyProperties2KHR; + +typedef struct VkVideoProfileKHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagBitsKHR videoCodecOperation; + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; + VkVideoComponentBitDepthFlagsKHR lumaBitDepth; + VkVideoComponentBitDepthFlagsKHR chromaBitDepth; +} VkVideoProfileKHR; + +typedef struct VkVideoProfilesKHR { + VkStructureType sType; + void* pNext; + uint32_t profileCount; + const VkVideoProfileKHR* pProfiles; +} VkVideoProfilesKHR; + +typedef struct VkVideoCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCapabilityFlagsKHR capabilityFlags; + VkDeviceSize minBitstreamBufferOffsetAlignment; + VkDeviceSize minBitstreamBufferSizeAlignment; + VkExtent2D videoPictureExtentGranularity; + VkExtent2D minExtent; + VkExtent2D maxExtent; + uint32_t maxReferencePicturesSlotsCount; + uint32_t maxReferencePicturesActiveCount; +} VkVideoCapabilitiesKHR; + +typedef struct VkPhysicalDeviceVideoFormatInfoKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags imageUsage; + const VkVideoProfilesKHR* pVideoProfiles; +} VkPhysicalDeviceVideoFormatInfoKHR; + +typedef struct VkVideoFormatPropertiesKHR { + VkStructureType sType; + void* pNext; + VkFormat format; +} VkVideoFormatPropertiesKHR; + +typedef struct VkVideoPictureResourceKHR { + VkStructureType sType; + const void* pNext; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + uint32_t baseArrayLayer; + VkImageView imageViewBinding; +} VkVideoPictureResourceKHR; + +typedef struct VkVideoReferenceSlotKHR { + VkStructureType sType; + const void* pNext; + int8_t slotIndex; + const VkVideoPictureResourceKHR* pPictureResource; +} VkVideoReferenceSlotKHR; + +typedef struct VkVideoGetMemoryPropertiesKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkMemoryRequirements2* pMemoryRequirements; +} VkVideoGetMemoryPropertiesKHR; + +typedef struct VkVideoBindMemoryKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkDeviceSize memorySize; +} VkVideoBindMemoryKHR; + +typedef struct VkVideoSessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + VkVideoSessionCreateFlagsKHR flags; + const VkVideoProfileKHR* pVideoProfile; + VkFormat pictureFormat; + VkExtent2D maxCodedExtent; + VkFormat referencePicturesFormat; + uint32_t maxReferencePicturesSlotsCount; + uint32_t maxReferencePicturesActiveCount; +} VkVideoSessionCreateInfoKHR; + +typedef struct VkVideoSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersKHR videoSessionParametersTemplate; + VkVideoSessionKHR videoSession; +} VkVideoSessionParametersCreateInfoKHR; + +typedef struct VkVideoSessionParametersUpdateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t updateSequenceCount; +} VkVideoSessionParametersUpdateInfoKHR; + +typedef struct VkVideoBeginCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoBeginCodingFlagsKHR flags; + VkVideoCodingQualityPresetFlagsKHR codecQualityPreset; + VkVideoSessionKHR videoSession; + VkVideoSessionParametersKHR 
videoSessionParameters; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoBeginCodingInfoKHR; + +typedef struct VkVideoEndCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEndCodingFlagsKHR flags; +} VkVideoEndCodingInfoKHR; + +typedef struct VkVideoCodingControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodingControlFlagsKHR flags; +} VkVideoCodingControlInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pVideoSessionMemoryRequirementsCount, VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t videoSessionBindMemoryCount, const VkVideoBindMemoryKHR* pVideoSessionBindMemories); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); +typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + const VkVideoProfileKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR( + VkDevice device, + const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks* 
pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t* pVideoSessionMemoryRequirementsCount, + VkVideoGetMemoryPropertiesKHR* pVideoSessionMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VkVideoBindMemoryKHR* pVideoSessionBindMemories); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( + VkDevice device, + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters); + +VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR* pEndCodingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR* pCodingControlInfo); +#endif + + +#define VK_KHR_video_decode_queue 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 2 +#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" + +typedef enum VkVideoDecodeFlagBitsKHR { + VK_VIDEO_DECODE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeFlagBitsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeFlagsKHR flags; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceKHR dstPictureResource; + const VkVideoReferenceSlotKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pFrameInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR* pFrameInfo); +#endif + + +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + 
+typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 3 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeRateControlFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_RESERVED_0_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 1, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 2, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + uint32_t qualityLevel; + VkExtent2D codedExtent; + VkBuffer dstBitstreamBuffer; + VkDeviceSize dstBitstreamBufferOffset; + VkDeviceSize dstBitstreamBufferMaxRange; + VkVideoPictureResourceKHR srcPictureResource; + const VkVideoReferenceSlotKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotKHR* pReferenceSlots; + uint32_t precedingExternallyEncodedBytes; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeRateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t averageBitrate; + uint32_t maxBitrate; + uint32_t frameRateNumerator; + uint32_t frameRateDenominator; + uint32_t virtualBufferSizeInMs; + uint32_t initialVirtualBufferSizeInMs; +} VkVideoEncodeRateControlLayerInfoKHR; + +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint8_t layerCount; + const VkVideoEncodeRateControlLayerInfoKHR* pLayerConfigs; +} VkVideoEncodeRateControlInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif + + +#define VK_EXT_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 3 +#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilityFlagBitsEXT { + VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000010, + 
VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00000200, + VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT = 0x00000400, + VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264CapabilityFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264CapabilityFlagsEXT; + +typedef enum VkVideoEncodeH264InputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264InputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT; + +typedef enum VkVideoEncodeH264OutputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264OutputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT; + +typedef enum VkVideoEncodeH264CreateFlagBitsEXT { + VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT = 0, + VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264CreateFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264CreateFlagsEXT; + +typedef enum VkVideoEncodeH264RateControlStructureFlagBitsEXT { + VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH264RateControlStructureFlagBitsEXT; +typedef VkFlags VkVideoEncodeH264RateControlStructureFlagsEXT; +typedef struct VkVideoEncodeH264CapabilitiesEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264CapabilityFlagsEXT flags; + VkVideoEncodeH264InputModeFlagsEXT inputModeFlags; + VkVideoEncodeH264OutputModeFlagsEXT outputModeFlags; + VkExtent2D minPictureSizeInMbs; + VkExtent2D maxPictureSizeInMbs; + VkExtent2D inputImageDataAlignment; + uint8_t maxNumL0ReferenceForP; + uint8_t maxNumL0ReferenceForB; + uint8_t maxNumL1Reference; + uint8_t qualityLevelCount; + VkExtensionProperties stdExtensionVersion; +} VkVideoEncodeH264CapabilitiesEXT; + +typedef struct VkVideoEncodeH264SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264CreateFlagsEXT flags; + VkExtent2D maxPictureSizeInMbs; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoEncodeH264SessionCreateInfoEXT; + +typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH264SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH264PictureParameterSet* pPpsStd; +} VkVideoEncodeH264SessionParametersAddInfoEXT; + +typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t 
maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoEncodeH264SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoEXT; + +typedef struct VkVideoEncodeH264DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + int8_t slotIndex; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; +} VkVideoEncodeH264DpbSlotInfoEXT; + +typedef struct VkVideoEncodeH264NaluSliceEXT { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH264SliceHeader* pSliceHeaderStd; + uint32_t mbCount; + uint8_t refFinalList0EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList0Entries; + uint8_t refFinalList1EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefFinalList1Entries; +} VkVideoEncodeH264NaluSliceEXT; + +typedef struct VkVideoEncodeH264VclFrameInfoEXT { + VkStructureType sType; + const void* pNext; + uint8_t refDefaultFinalList0EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList0Entries; + uint8_t refDefaultFinalList1EntryCount; + const VkVideoEncodeH264DpbSlotInfoEXT* pRefDefaultFinalList1Entries; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceEXT* pNaluSliceEntries; + const VkVideoEncodeH264DpbSlotInfoEXT* pCurrentPictureInfo; +} VkVideoEncodeH264VclFrameInfoEXT; + +typedef struct VkVideoEncodeH264EmitPictureParametersEXT { + VkStructureType sType; + const void* pNext; + uint8_t spsId; + VkBool32 emitSpsEnable; + uint32_t ppsIdEntryCount; + const uint8_t* ppsIdEntries; +} VkVideoEncodeH264EmitPictureParametersEXT; + +typedef struct VkVideoEncodeH264ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; +} VkVideoEncodeH264ProfileEXT; + +typedef struct VkVideoEncodeH264RateControlInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + VkVideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure; + uint8_t temporalLayerCount; +} VkVideoEncodeH264RateControlInfoEXT; + +typedef struct VkVideoEncodeH264QpEXT { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH264QpEXT; + +typedef struct VkVideoEncodeH264FrameSizeEXT { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH264FrameSizeEXT; + +typedef struct VkVideoEncodeH264RateControlLayerInfoEXT { + VkStructureType sType; + const void* pNext; + uint8_t temporalLayerId; + VkBool32 useInitialRcQp; + VkVideoEncodeH264QpEXT initialRcQp; + VkBool32 useMinQp; + VkVideoEncodeH264QpEXT minQp; + VkBool32 useMaxQp; + VkVideoEncodeH264QpEXT maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH264FrameSizeEXT maxFrameSize; +} VkVideoEncodeH264RateControlLayerInfoEXT; + + + +#define VK_EXT_video_encode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_encode.h" +#define VK_EXT_VIDEO_ENCODE_H265_SPEC_VERSION 3 +#define VK_EXT_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_EXT_video_encode_h265" +typedef VkFlags VkVideoEncodeH265CapabilityFlagsEXT; + +typedef enum VkVideoEncodeH265InputModeFlagBitsEXT { + VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H265_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH265InputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH265InputModeFlagsEXT; + +typedef enum VkVideoEncodeH265OutputModeFlagBitsEXT { + 
VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH265OutputModeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH265OutputModeFlagsEXT; +typedef VkFlags VkVideoEncodeH265CreateFlagsEXT; + +typedef enum VkVideoEncodeH265CtbSizeFlagBitsEXT { + VK_VIDEO_ENCODE_H265_CTB_SIZE_8_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT = 0x00000004, + VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT = 0x00000008, + VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH265CtbSizeFlagBitsEXT; +typedef VkFlags VkVideoEncodeH265CtbSizeFlagsEXT; + +typedef enum VkVideoEncodeH265RateControlStructureFlagBitsEXT { + VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT = 0x00000001, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT = 0x00000002, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoEncodeH265RateControlStructureFlagBitsEXT; +typedef VkFlags VkVideoEncodeH265RateControlStructureFlagsEXT; +typedef struct VkVideoEncodeH265CapabilitiesEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH265CapabilityFlagsEXT flags; + VkVideoEncodeH265InputModeFlagsEXT inputModeFlags; + VkVideoEncodeH265OutputModeFlagsEXT outputModeFlags; + VkVideoEncodeH265CtbSizeFlagsEXT ctbSizes; + VkExtent2D inputImageDataAlignment; + uint8_t maxNumL0ReferenceForP; + uint8_t maxNumL0ReferenceForB; + uint8_t maxNumL1Reference; + uint8_t maxNumSubLayers; + uint8_t qualityLevelCount; + VkExtensionProperties stdExtensionVersion; +} VkVideoEncodeH265CapabilitiesEXT; + +typedef struct VkVideoEncodeH265SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH265CreateFlagsEXT flags; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoEncodeH265SessionCreateInfoEXT; + +typedef struct VkVideoEncodeH265SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t vpsStdCount; + const StdVideoH265VideoParameterSet* pVpsStd; + uint32_t spsStdCount; + const StdVideoH265SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH265PictureParameterSet* pPpsStd; +} VkVideoEncodeH265SessionParametersAddInfoEXT; + +typedef struct VkVideoEncodeH265SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxVpsStdCount; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoEncodeH265SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoEncodeH265SessionParametersCreateInfoEXT; + +typedef struct VkVideoEncodeH265DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + int8_t slotIndex; + const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH265DpbSlotInfoEXT; + +typedef struct VkVideoEncodeH265ReferenceListsEXT { + VkStructureType sType; + const void* pNext; + uint8_t referenceList0EntryCount; + const VkVideoEncodeH265DpbSlotInfoEXT* pReferenceList0Entries; + uint8_t referenceList1EntryCount; + const VkVideoEncodeH265DpbSlotInfoEXT* pReferenceList1Entries; + const StdVideoEncodeH265ReferenceModifications* pReferenceModifications; +} VkVideoEncodeH265ReferenceListsEXT; + +typedef struct VkVideoEncodeH265NaluSliceEXT { + VkStructureType sType; + const void* 
pNext; + uint32_t ctbCount; + const VkVideoEncodeH265ReferenceListsEXT* pReferenceFinalLists; + const StdVideoEncodeH265SliceHeader* pSliceHeaderStd; +} VkVideoEncodeH265NaluSliceEXT; + +typedef struct VkVideoEncodeH265VclFrameInfoEXT { + VkStructureType sType; + const void* pNext; + const VkVideoEncodeH265ReferenceListsEXT* pReferenceFinalLists; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH265NaluSliceEXT* pNaluSliceEntries; + const StdVideoEncodeH265PictureInfo* pCurrentPictureInfo; +} VkVideoEncodeH265VclFrameInfoEXT; + +typedef struct VkVideoEncodeH265EmitPictureParametersEXT { + VkStructureType sType; + const void* pNext; + uint8_t vpsId; + uint8_t spsId; + VkBool32 emitVpsEnable; + VkBool32 emitSpsEnable; + uint32_t ppsIdEntryCount; + const uint8_t* ppsIdEntries; +} VkVideoEncodeH265EmitPictureParametersEXT; + +typedef struct VkVideoEncodeH265ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoEncodeH265ProfileEXT; + +typedef struct VkVideoEncodeH265RateControlInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + VkVideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure; + uint8_t subLayerCount; +} VkVideoEncodeH265RateControlInfoEXT; + +typedef struct VkVideoEncodeH265QpEXT { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH265QpEXT; + +typedef struct VkVideoEncodeH265FrameSizeEXT { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH265FrameSizeEXT; + +typedef struct VkVideoEncodeH265RateControlLayerInfoEXT { + VkStructureType sType; + const void* pNext; + uint8_t temporalId; + VkBool32 useInitialRcQp; + VkVideoEncodeH265QpEXT initialRcQp; + VkBool32 useMinQp; + VkVideoEncodeH265QpEXT minQp; + VkBool32 useMaxQp; + VkVideoEncodeH265QpEXT maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH265FrameSizeEXT maxFrameSize; +} VkVideoEncodeH265RateControlLayerInfoEXT; + + + +#define VK_EXT_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_EXT_VIDEO_DECODE_H264_SPEC_VERSION 3 +#define VK_EXT_VIDEO_DECODE_H264_EXTENSION_NAME "VK_EXT_video_decode_h264" + +typedef enum VkVideoDecodeH264PictureLayoutFlagBitsEXT { + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_EXT = 0, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_EXT = 0x00000001, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_EXT = 0x00000002, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkVideoDecodeH264PictureLayoutFlagBitsEXT; +typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsEXT; +typedef VkFlags VkVideoDecodeH264CreateFlagsEXT; +typedef struct VkVideoDecodeH264ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264PictureLayoutFlagsEXT pictureLayout; +} VkVideoDecodeH264ProfileEXT; + +typedef struct VkVideoDecodeH264CapabilitiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxLevel; + VkOffset2D fieldOffsetGranularity; + VkExtensionProperties stdExtensionVersion; +} VkVideoDecodeH264CapabilitiesEXT; + +typedef struct VkVideoDecodeH264SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoDecodeH264CreateFlagsEXT flags; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoDecodeH264SessionCreateInfoEXT; + +typedef struct VkVideoDecodeH264SessionParametersAddInfoEXT { + VkStructureType sType; + const void* 
pNext; + uint32_t spsStdCount; + const StdVideoH264SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH264PictureParameterSet* pPpsStd; +} VkVideoDecodeH264SessionParametersAddInfoEXT; + +typedef struct VkVideoDecodeH264SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoDecodeH264SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoEXT; + +typedef struct VkVideoDecodeH264PictureInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t slicesCount; + const uint32_t* pSlicesDataOffsets; +} VkVideoDecodeH264PictureInfoEXT; + +typedef struct VkVideoDecodeH264MvcEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264Mvc* pStdMvc; +} VkVideoDecodeH264MvcEXT; + +typedef struct VkVideoDecodeH264DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoEXT; + + + +#define VK_EXT_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_EXT_VIDEO_DECODE_H265_SPEC_VERSION 1 +#define VK_EXT_VIDEO_DECODE_H265_EXTENSION_NAME "VK_EXT_video_decode_h265" +typedef VkFlags VkVideoDecodeH265CreateFlagsEXT; +typedef struct VkVideoDecodeH265ProfileEXT { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileEXT; + +typedef struct VkVideoDecodeH265CapabilitiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxLevel; + VkExtensionProperties stdExtensionVersion; +} VkVideoDecodeH265CapabilitiesEXT; + +typedef struct VkVideoDecodeH265SessionCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkVideoDecodeH265CreateFlagsEXT flags; + const VkExtensionProperties* pStdExtensionVersion; +} VkVideoDecodeH265SessionCreateInfoEXT; + +typedef struct VkVideoDecodeH265SessionParametersAddInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t spsStdCount; + const StdVideoH265SequenceParameterSet* pSpsStd; + uint32_t ppsStdCount; + const StdVideoH265PictureParameterSet* pPpsStd; +} VkVideoDecodeH265SessionParametersAddInfoEXT; + +typedef struct VkVideoDecodeH265SessionParametersCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t maxSpsStdCount; + uint32_t maxPpsStdCount; + const VkVideoDecodeH265SessionParametersAddInfoEXT* pParametersAddInfo; +} VkVideoDecodeH265SessionParametersCreateInfoEXT; + +typedef struct VkVideoDecodeH265PictureInfoEXT { + VkStructureType sType; + const void* pNext; + StdVideoDecodeH265PictureInfo* pStdPictureInfo; + uint32_t slicesCount; + const uint32_t* pSlicesDataOffsets; +} VkVideoDecodeH265PictureInfoEXT; + +typedef struct VkVideoDecodeH265DpbSlotInfoEXT { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH265DpbSlotInfoEXT; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_core.h b/Externals/Vulkan/Include/vulkan/vulkan_core.h index 50f72f677b22..228e4ef6e5a7 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_core.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_core.h @@ -2,19 +2,9 @@ #define VULKAN_CORE_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. 
** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -31,40 +21,82 @@ extern "C" { #define VK_VERSION_1_0 1 #include "vk_platform.h" + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#ifndef VK_USE_64_BIT_PTR_DEFINES + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_USE_64_BIT_PTR_DEFINES 1 + #else + #define VK_USE_64_BIT_PTR_DEFINES 0 + #endif +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L)) + #define VK_NULL_HANDLE nullptr + #else + #define VK_NULL_HANDLE ((void*)0) + #endif + #else + #define VK_NULL_HANDLE 0ULL + #endif +#endif +#ifndef VK_NULL_HANDLE + #define VK_NULL_HANDLE 0 +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; + #else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; + #endif +#endif + +// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. #define VK_MAKE_VERSION(major, minor, patch) \ - (((major) << 22) | ((minor) << 12) | (patch)) + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) // DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. //#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 +#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + // Vulkan 1.0 version number -#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 +#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 -#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) -#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) -#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) // Version of this file -#define VK_HEADER_VERSION 121 - +#define VK_HEADER_VERSION 204 -#define VK_NULL_HANDLE 0 +// Complete version of this file +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) +// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) -#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; - +// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. 
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) -#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) -#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; -#else - #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; -#endif -#endif +// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. +#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) -typedef uint32_t VkFlags; +#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29) +#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU) +#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) +#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) typedef uint32_t VkBool32; +typedef uint64_t VkDeviceAddress; typedef uint64_t VkDeviceSize; +typedef uint32_t VkFlags; typedef uint32_t VkSampleMask; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) VK_DEFINE_HANDLE(VkInstance) VK_DEFINE_HANDLE(VkPhysicalDevice) VK_DEFINE_HANDLE(VkDevice) @@ -73,8 +105,6 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) VK_DEFINE_HANDLE(VkCommandBuffer) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) @@ -82,37 +112,29 @@ VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) -#define VK_LOD_CLAMP_NONE 1000.0f -#define VK_REMAINING_MIP_LEVELS (~0U) -#define VK_REMAINING_ARRAY_LAYERS (~0U) -#define VK_WHOLE_SIZE (~0ULL) +#define VK_UUID_SIZE 16U #define VK_ATTACHMENT_UNUSED (~0U) -#define VK_TRUE 1 -#define VK_FALSE 0 +#define VK_FALSE 0U +#define VK_LOD_CLAMP_NONE 1000.0F #define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_REMAINING_MIP_LEVELS (~0U) #define VK_SUBPASS_EXTERNAL (~0U) -#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 -#define VK_UUID_SIZE 16 -#define VK_MAX_MEMORY_TYPES 32 -#define VK_MAX_MEMORY_HEAPS 16 -#define VK_MAX_EXTENSION_NAME_SIZE 256 -#define VK_MAX_DESCRIPTION_SIZE 256 - -typedef enum VkPipelineCacheHeaderVersion { - VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, - VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), - 
VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCacheHeaderVersion; +#define VK_TRUE 1U +#define VK_WHOLE_SIZE (~0ULL) +#define VK_MAX_MEMORY_TYPES 32U +#define VK_MAX_MEMORY_HEAPS 16U +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256U +#define VK_MAX_EXTENSION_NAME_SIZE 256U +#define VK_MAX_DESCRIPTION_SIZE 256U typedef enum VkResult { VK_SUCCESS = 0, @@ -133,8 +155,12 @@ typedef enum VkResult { VK_ERROR_TOO_MANY_OBJECTS = -10, VK_ERROR_FORMAT_NOT_SUPPORTED = -11, VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_UNKNOWN = -13, VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_FRAGMENTATION = -1000161000, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, + VK_PIPELINE_COMPILE_REQUIRED = 1000297000, VK_ERROR_SURFACE_LOST_KHR = -1000000000, VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, VK_SUBOPTIMAL_KHR = 1000001003, @@ -143,15 +169,20 @@ typedef enum VkResult { VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, VK_ERROR_INVALID_SHADER_NV = -1000012000, VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, - VK_ERROR_FRAGMENTATION_EXT = -1000161000, - VK_ERROR_NOT_PERMITTED_EXT = -1000174001, - VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = -1000244000, + VK_ERROR_NOT_PERMITTED_KHR = -1000174001, VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, + VK_THREAD_IDLE_KHR = 1000268000, + VK_THREAD_DONE_KHR = 1000268001, + VK_OPERATION_DEFERRED_KHR = 1000268002, + VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, - VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, - VK_RESULT_END_RANGE = VK_INCOMPLETE, - VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), + VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, + VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR, + VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, VK_RESULT_MAX_ENUM = 0x7FFFFFFF } VkResult; @@ -270,6 +301,108 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES = 53, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES = 54, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO = 1000192000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES = 1000215000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES = 1000245000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES = 1000295000, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO = 1000295001, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO = 1000295002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES = 1000297000, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 = 1000314000, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2 = 1000314001, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 = 1000314002, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO = 1000314003, + 
VK_STRUCTURE_TYPE_SUBMIT_INFO_2 = 1000314004, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO = 1000314005, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO = 1000314006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES = 1000314007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES = 1000325000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES = 1000335000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2 = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2 = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2 = 1000337003, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2 = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2 = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2 = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2 = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2 = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2 = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES = 1000225000, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO = 1000225001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES = 1000225002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES = 1000138000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES = 1000138001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK = 1000138002, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO = 1000138003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES = 1000066000, + VK_STRUCTURE_TYPE_RENDERING_INFO = 1000044000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO = 1000044001, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO = 1000044002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES = 1000044003, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO = 1000044004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES = 1000280000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES = 1000280001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES = 1000281001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 = 1000360000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES = 1000413000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, @@ -291,14 +424,169 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR = 1000023000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR = 1000023002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR = 1000023003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR = 1000023004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR = 1000023011, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000023012, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR = 1000023013, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_2_KHR = 1000023016, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, +#endif VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, + VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, + VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT = 1000038001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT = 1000038006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT = 1000038007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT = 1000038008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT = 1000038009, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT = 1000038010, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT = 1000039000, +#endif +#ifdef 
VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_EXT = 1000039001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000039002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000039003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT = 1000039004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT = 1000039005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_EXT = 1000039006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_EXT = 1000039007, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_EXT = 1000039008, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_EXT = 1000039009, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT = 1000039010, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT = 1000039011, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT = 1000040000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT = 1000040001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT = 1000040002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT = 1000040003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT = 1000040004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000040005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000040006, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT = 1000040007, +#endif VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, @@ -308,7 +596,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = 1000066000, VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, @@ -329,14 +616,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 
1000081001, VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = 1000082000, VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, - VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, - VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, - VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, - VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, @@ -353,23 +633,19 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001, VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = 1000108000, - VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = 1000108001, - VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = 1000108002, - VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = 1000108003, - VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = 1000109000, - VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = 1000109001, - VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = 1000109002, - VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = 1000109003, - VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = 1000109004, - VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = 1000109005, - VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = 1000109006, VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002, + VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003, + VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, @@ -391,38 +667,54 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, - VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = 1000138000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = 1000138001, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = 
1000138002, - VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = 1000138003, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, - VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013, VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, - VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT = 1000158006, VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, - 
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = 1000161000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = 1000161001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 1000161002, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = 1000161003, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = 1000161004, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001, +#endif VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, @@ -442,25 +734,42 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000, VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = 1000177000, VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = 1000180000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000, VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT = 1000187000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT = 1000187001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000187002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000187003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT = 1000187004, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT = 1000187005, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT = 1000187006, +#endif + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002, VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, - VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000192000, - 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = 1000196000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = 1000197000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = 1000199000, - VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = 1000199001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, @@ -471,13 +780,12 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, - VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = 1000210000, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002, VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003, VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004, VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = 1000211000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000, VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000, VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001, @@ -486,22 +794,23 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = 1000221000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = 1000225000, - VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = 1000225001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = 1000225002, + VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000, - VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = 1000244001, VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002, - 
VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = 1000246000, VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR = 1000248000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000, VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002, @@ -510,7 +819,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = 1000253000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, @@ -518,20 +829,123 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = 1000261000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001, + 
VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, + VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, + VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, + VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = 1000342000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = 1000351000, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, + VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, + VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000, + 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, + VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, + VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA = 1000366000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA = 1000366001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA = 1000366002, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA = 1000366003, + VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA = 1000366004, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA = 1000366005, + VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA = 1000366006, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA = 1000366007, + VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA = 1000366008, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA = 1000366009, + VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI = 1000369000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI = 1000369001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI = 1000369002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000, + VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000, + VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000, + VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001, + VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + 
VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, @@ -551,6 +965,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, @@ -564,10 +979,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = 
VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, @@ -575,15 +1002,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, 
VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, @@ -592,32 +1026,223 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, - VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, - VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = 
VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkStructureType; +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + 
VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002, +#endif + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, + VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, +#endif + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT = 1000295000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + 
VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001, +#endif + VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000, + VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, + VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkVendorId { + VK_VENDOR_ID_VIV = 0x10001, + VK_VENDOR_ID_VSI = 0x10002, + VK_VENDOR_ID_KAZAN = 0x10003, + VK_VENDOR_ID_CODEPLAY = 0x10004, + VK_VENDOR_ID_MESA = 0x10005, + VK_VENDOR_ID_POCL = 0x10006, + VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF +} VkVendorId; + typedef enum VkSystemAllocationScope { VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, - VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, - VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF } VkSystemAllocationScope; typedef enum VkInternalAllocationType { VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, - VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF } VkInternalAllocationType; @@ -841,6 +1466,26 @@ typedef enum VkFormat { VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM = 1000330000, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 = 1000330001, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 = 1000330002, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM = 1000330003, + VK_FORMAT_A4R4G4B4_UNORM_PACK16 = 1000340000, + VK_FORMAT_A4B4G4R4_UNORM_PACK16 = 1000340001, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK = 1000066000, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK = 1000066001, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK = 1000066002, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK = 1000066003, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK = 1000066004, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK = 1000066005, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK = 1000066006, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK = 1000066007, + 
VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK = 1000066008, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK = 1000066009, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK = 1000066010, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013, VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, @@ -849,20 +1494,20 @@ typedef enum VkFormat { VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, - VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = 1000066000, - VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = 1000066001, - VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = 1000066002, - VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = 1000066003, - VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = 1000066004, - VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = 1000066005, - VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = 1000066006, - VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = 1000066007, - VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = 1000066008, - VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = 1000066009, - VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = 1000066010, - VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = 1000066011, - VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = 1000066012, - VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = 1000066013, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, @@ -897,41 +1542,35 @@ typedef enum VkFormat { VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, - VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, - VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, + VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT 
= VK_FORMAT_A4R4G4B4_UNORM_PACK16, + VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16, VK_FORMAT_MAX_ENUM = 0x7FFFFFFF } VkFormat; -typedef enum VkImageType { - VK_IMAGE_TYPE_1D = 0, - VK_IMAGE_TYPE_2D = 1, - VK_IMAGE_TYPE_3D = 2, - VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, - VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, - VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), - VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageType; - typedef enum VkImageTiling { VK_IMAGE_TILING_OPTIMAL = 0, VK_IMAGE_TILING_LINEAR = 1, VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000, - VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, - VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF } VkImageTiling; +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + typedef enum VkPhysicalDeviceType { VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, VK_PHYSICAL_DEVICE_TYPE_CPU = 4, - VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, - VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, - VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkPhysicalDeviceType; @@ -939,62 +1578,27 @@ typedef enum VkQueryType { VK_QUERY_TYPE_OCCLUSION = 0, VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, VK_QUERY_TYPE_TIMESTAMP = 2, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000, +#endif VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, + VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, - VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, - VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, - VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000, +#endif VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF } VkQueryType; typedef enum VkSharingMode { VK_SHARING_MODE_EXCLUSIVE = 0, VK_SHARING_MODE_CONCURRENT = 1, - VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, - VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, - VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF } VkSharingMode; -typedef enum VkImageLayout { - VK_IMAGE_LAYOUT_UNDEFINED = 0, - VK_IMAGE_LAYOUT_GENERAL = 1, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, - VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, - VK_IMAGE_LAYOUT_PREINITIALIZED = 8, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, - 
VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, - VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003, - VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, - VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, - VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, - VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, - VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), - VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF -} VkImageLayout; - -typedef enum VkImageViewType { - VK_IMAGE_VIEW_TYPE_1D = 0, - VK_IMAGE_VIEW_TYPE_2D = 1, - VK_IMAGE_VIEW_TYPE_3D = 2, - VK_IMAGE_VIEW_TYPE_CUBE = 3, - VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, - VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, - VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, - VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, - VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, - VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), - VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageViewType; - typedef enum VkComponentSwizzle { VK_COMPONENT_SWIZZLE_IDENTITY = 0, VK_COMPONENT_SWIZZLE_ZERO = 1, @@ -1003,111 +1607,19 @@ typedef enum VkComponentSwizzle { VK_COMPONENT_SWIZZLE_G = 4, VK_COMPONENT_SWIZZLE_B = 5, VK_COMPONENT_SWIZZLE_A = 6, - VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, - VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF } VkComponentSwizzle; -typedef enum VkVertexInputRate { - VK_VERTEX_INPUT_RATE_VERTEX = 0, - VK_VERTEX_INPUT_RATE_INSTANCE = 1, - VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, - VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, - VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), - VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF -} VkVertexInputRate; - -typedef enum VkPrimitiveTopology { - VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, - VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, - VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, - VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), - VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF -} VkPrimitiveTopology; - -typedef enum VkPolygonMode { - VK_POLYGON_MODE_FILL = 0, - VK_POLYGON_MODE_LINE = 1, - VK_POLYGON_MODE_POINT = 2, - VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, - VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, - VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, - VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), - VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF -} VkPolygonMode; - -typedef enum VkFrontFace { - VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, - 
VK_FRONT_FACE_CLOCKWISE = 1, - VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, - VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, - VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), - VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF -} VkFrontFace; - -typedef enum VkCompareOp { - VK_COMPARE_OP_NEVER = 0, - VK_COMPARE_OP_LESS = 1, - VK_COMPARE_OP_EQUAL = 2, - VK_COMPARE_OP_LESS_OR_EQUAL = 3, - VK_COMPARE_OP_GREATER = 4, - VK_COMPARE_OP_NOT_EQUAL = 5, - VK_COMPARE_OP_GREATER_OR_EQUAL = 6, - VK_COMPARE_OP_ALWAYS = 7, - VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, - VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, - VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), - VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF -} VkCompareOp; - -typedef enum VkStencilOp { - VK_STENCIL_OP_KEEP = 0, - VK_STENCIL_OP_ZERO = 1, - VK_STENCIL_OP_REPLACE = 2, - VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, - VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, - VK_STENCIL_OP_INVERT = 5, - VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, - VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, - VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, - VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, - VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), - VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF -} VkStencilOp; - -typedef enum VkLogicOp { - VK_LOGIC_OP_CLEAR = 0, - VK_LOGIC_OP_AND = 1, - VK_LOGIC_OP_AND_REVERSE = 2, - VK_LOGIC_OP_COPY = 3, - VK_LOGIC_OP_AND_INVERTED = 4, - VK_LOGIC_OP_NO_OP = 5, - VK_LOGIC_OP_XOR = 6, - VK_LOGIC_OP_OR = 7, - VK_LOGIC_OP_NOR = 8, - VK_LOGIC_OP_EQUIVALENT = 9, - VK_LOGIC_OP_INVERT = 10, - VK_LOGIC_OP_OR_REVERSE = 11, - VK_LOGIC_OP_COPY_INVERTED = 12, - VK_LOGIC_OP_OR_INVERTED = 13, - VK_LOGIC_OP_NAND = 14, - VK_LOGIC_OP_SET = 15, - VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, - VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, - VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), - VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF -} VkLogicOp; +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; typedef enum VkBlendFactor { VK_BLEND_FACTOR_ZERO = 0, @@ -1129,9 +1641,6 @@ typedef enum VkBlendFactor { VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, VK_BLEND_FACTOR_SRC1_ALPHA = 17, VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, - VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, - VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, - VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF } VkBlendFactor; @@ -1187,12 +1696,21 @@ typedef enum VkBlendOp { VK_BLEND_OP_RED_EXT = 1000148043, VK_BLEND_OP_GREEN_EXT = 1000148044, VK_BLEND_OP_BLUE_EXT = 1000148045, - VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, - VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, - VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF } VkBlendOp; +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + typedef enum VkDynamicState 
{ VK_DYNAMIC_STATE_VIEWPORT = 0, VK_DYNAMIC_STATE_SCISSOR = 1, @@ -1203,39 +1721,139 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_CULL_MODE = 1000267000, + VK_DYNAMIC_STATE_FRONT_FACE = 1000267001, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY = 1000267002, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT = 1000267003, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT = 1000267004, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE = 1000267005, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE = 1000267006, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE = 1000267007, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP = 1000267008, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE = 1000267009, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE = 1000267010, + VK_DYNAMIC_STATE_STENCIL_OP = 1000267011, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000, VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, + VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000, VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000, - VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, - VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), + VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000, + VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000, + VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003, + VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000, + VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE, + VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + VK_DYNAMIC_STATE_STENCIL_OP_EXT = VK_DYNAMIC_STATE_STENCIL_OP, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF } VkDynamicState; +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF 
+} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003, + VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004, + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + typedef enum VkFilter { VK_FILTER_NEAREST = 0, VK_FILTER_LINEAR = 1, VK_FILTER_CUBIC_IMG = 1000015000, VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG, - VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, - VK_FILTER_END_RANGE = VK_FILTER_LINEAR, - VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), VK_FILTER_MAX_ENUM = 0x7FFFFFFF } VkFilter; -typedef enum VkSamplerMipmapMode { - VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, - VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, - VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, - VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, - VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), - VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerMipmapMode; - typedef enum VkSamplerAddressMode { VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, @@ -1243,24 +1861,14 @@ typedef enum VkSamplerAddressMode { VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, - VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, - VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER 
- VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF } VkSamplerAddressMode; -typedef enum VkBorderColor { - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, - VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, - VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, - VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, - VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, - VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, - VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, - VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), - VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF -} VkBorderColor; +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_SAMPLER = 0, @@ -1274,11 +1882,11 @@ typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, - VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, - VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, - VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), + VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = 1000351000, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorType; @@ -1286,118 +1894,107 @@ typedef enum VkAttachmentLoadOp { VK_ATTACHMENT_LOAD_OP_LOAD = 0, VK_ATTACHMENT_LOAD_OP_CLEAR = 1, VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), + VK_ATTACHMENT_LOAD_OP_NONE_EXT = 1000400000, VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF } VkAttachmentLoadOp; typedef enum VkAttachmentStoreOp { VK_ATTACHMENT_STORE_OP_STORE = 0, VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, - VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, - VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, - VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), + VK_ATTACHMENT_STORE_OP_NONE = 1000301000, + VK_ATTACHMENT_STORE_OP_NONE_KHR = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_NONE_QCOM = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_NONE_EXT = VK_ATTACHMENT_STORE_OP_NONE, VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF } VkAttachmentStoreOp; typedef enum VkPipelineBindPoint { VK_PIPELINE_BIND_POINT_GRAPHICS = 0, VK_PIPELINE_BIND_POINT_COMPUTE = 1, - VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = 1000165000, - VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, - VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, - VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), + VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, + VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003, + 
VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF } VkPipelineBindPoint; typedef enum VkCommandBufferLevel { VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, - VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, - VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF } VkCommandBufferLevel; typedef enum VkIndexType { VK_INDEX_TYPE_UINT16 = 0, VK_INDEX_TYPE_UINT32 = 1, - VK_INDEX_TYPE_NONE_NV = 1000165000, + VK_INDEX_TYPE_NONE_KHR = 1000165000, VK_INDEX_TYPE_UINT8_EXT = 1000265000, - VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, - VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, - VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), + VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF } VkIndexType; typedef enum VkSubpassContents { VK_SUBPASS_CONTENTS_INLINE = 0, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, - VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, - VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, - VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF } VkSubpassContents; -typedef enum VkObjectType { - VK_OBJECT_TYPE_UNKNOWN = 0, - VK_OBJECT_TYPE_INSTANCE = 1, - VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, - VK_OBJECT_TYPE_DEVICE = 3, - VK_OBJECT_TYPE_QUEUE = 4, - VK_OBJECT_TYPE_SEMAPHORE = 5, - VK_OBJECT_TYPE_COMMAND_BUFFER = 6, - VK_OBJECT_TYPE_FENCE = 7, - VK_OBJECT_TYPE_DEVICE_MEMORY = 8, - VK_OBJECT_TYPE_BUFFER = 9, - VK_OBJECT_TYPE_IMAGE = 10, - VK_OBJECT_TYPE_EVENT = 11, - VK_OBJECT_TYPE_QUERY_POOL = 12, - VK_OBJECT_TYPE_BUFFER_VIEW = 13, - VK_OBJECT_TYPE_IMAGE_VIEW = 14, - VK_OBJECT_TYPE_SHADER_MODULE = 15, - VK_OBJECT_TYPE_PIPELINE_CACHE = 16, - VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, - VK_OBJECT_TYPE_RENDER_PASS = 18, - VK_OBJECT_TYPE_PIPELINE = 19, - VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, - VK_OBJECT_TYPE_SAMPLER = 21, - VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, - VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, - VK_OBJECT_TYPE_FRAMEBUFFER = 24, - VK_OBJECT_TYPE_COMMAND_POOL = 25, - VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, - VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, - VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, - VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, - VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, - VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, - VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, - VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, - VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, - VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, - VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, - VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, - VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, - VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, - VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), - VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkObjectType; 
+typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_NONE = 0, + VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, + VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, + VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + VK_ACCESS_NONE_KHR = VK_ACCESS_NONE, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; -typedef enum VkVendorId { - VK_VENDOR_ID_VIV = 0x10001, - VK_VENDOR_ID_VSI = 0x10002, - VK_VENDOR_ID_KAZAN = 0x10003, - VK_VENDOR_ID_BEGIN_RANGE = VK_VENDOR_ID_VIV, - VK_VENDOR_ID_END_RANGE = VK_VENDOR_ID_KAZAN, - VK_VENDOR_ID_RANGE_SIZE = (VK_VENDOR_ID_KAZAN - VK_VENDOR_ID_VIV + 1), - VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF -} VkVendorId; -typedef VkFlags VkInstanceCreateFlags; +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, + VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, + VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, + VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, + VK_IMAGE_ASPECT_NONE_KHR = 0, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, @@ -1422,11 +2019,26 @@ typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, 
VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000, +#endif + VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, + VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000, +#endif VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, @@ -1439,21 +2051,6 @@ typedef enum VkFormatFeatureFlagBits { } VkFormatFeatureFlagBits; typedef VkFlags VkFormatFeatureFlags; -typedef enum VkImageUsageFlagBits { - VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, - VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, - VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, - VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, - VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100, - VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, - VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageUsageFlagBits; -typedef VkFlags VkImageUsageFlags; - typedef enum VkImageCreateFlagBits { VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, @@ -1470,6 +2067,7 @@ typedef enum VkImageCreateFlagBits { VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, + VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000, VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, @@ -1492,15 +2090,49 @@ typedef enum VkSampleCountFlagBits { } VkSampleCountFlagBits; typedef VkFlags VkSampleCountFlags; -typedef enum VkQueueFlagBits { - VK_QUEUE_GRAPHICS_BIT = 0x00000001, - VK_QUEUE_COMPUTE_BIT = 0x00000002, - VK_QUEUE_TRANSFER_BIT = 0x00000004, - VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, - VK_QUEUE_PROTECTED_BIT = 0x00000010, - VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueueFlagBits; 
-typedef VkFlags VkQueueFlags; +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, +#endif + VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, + VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, +#endif + VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000, + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; typedef enum VkMemoryPropertyFlagBits { VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, @@ -1511,17 +2143,26 @@ typedef enum VkMemoryPropertyFlagBits { VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040, VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080, + VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV = 0x00000100, VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkMemoryPropertyFlagBits; typedef VkFlags VkMemoryPropertyFlags; -typedef enum VkMemoryHeapFlagBits { - VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, - VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryHeapFlagBits; -typedef VkFlags VkMemoryHeapFlags; +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, +#endif + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; typedef VkFlags VkDeviceCreateFlags; typedef enum VkDeviceQueueCreateFlagBits { @@ -1548,38 +2189,30 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_NONE = 0, VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000, VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 
0x00040000, - VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, - VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000, - VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = 0x00200000, - VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000, VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineStageFlagBits; typedef VkFlags VkPipelineStageFlags; typedef VkFlags VkMemoryMapFlags; -typedef enum VkImageAspectFlagBits { - VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, - VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, - VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, - VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, - VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, - VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, - VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, - VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, - VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, - VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, - VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, - VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, - VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, - VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, - VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageAspectFlagBits; -typedef VkFlags VkImageAspectFlags; +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; typedef enum VkSparseImageFormatFlagBits { VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, @@ -1589,20 +2222,19 @@ typedef enum VkSparseImageFormatFlagBits { } VkSparseImageFormatFlagBits; typedef VkFlags VkSparseImageFormatFlags; -typedef enum VkSparseMemoryBindFlagBits { - VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, - VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseMemoryBindFlagBits; -typedef VkFlags VkSparseMemoryBindFlags; - typedef enum VkFenceCreateFlagBits { VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkFenceCreateFlagBits; typedef VkFlags VkFenceCreateFlags; typedef VkFlags VkSemaphoreCreateFlags; + +typedef enum VkEventCreateFlagBits { + VK_EVENT_CREATE_DEVICE_ONLY_BIT = 0x00000001, + VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT, + VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkEventCreateFlagBits; typedef VkFlags VkEventCreateFlags; -typedef VkFlags VkQueryPoolCreateFlags; typedef enum VkQueryPipelineStatisticFlagBits { VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, @@ -1619,12 +2251,16 @@ typedef enum VkQueryPipelineStatisticFlagBits { VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 
0x7FFFFFFF } VkQueryPipelineStatisticFlagBits; typedef VkFlags VkQueryPipelineStatisticFlags; +typedef VkFlags VkQueryPoolCreateFlags; typedef enum VkQueryResultFlagBits { VK_QUERY_RESULT_64_BIT = 0x00000001, VK_QUERY_RESULT_WAIT_BIT = 0x00000002, VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, +#endif VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueryResultFlagBits; typedef VkFlags VkQueryResultFlags; @@ -1634,7 +2270,9 @@ typedef enum VkBufferCreateFlagBits { VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, - VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000010, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkBufferCreateFlagBits; typedef VkFlags VkBufferCreateFlags; @@ -1649,11 +2287,28 @@ typedef enum VkBufferUsageFlagBits { VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, +#endif VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, - VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 0x00000400, - VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = 0x00020000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, + VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, +#endif + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkBufferUsageFlagBits; typedef VkFlags VkBufferUsageFlags; @@ -1661,34 +2316,67 @@ typedef VkFlags VkBufferViewCreateFlags; typedef enum VkImageViewCreateFlagBits { VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageViewCreateFlagBits; typedef VkFlags VkImageViewCreateFlags; - -typedef enum VkShaderModuleCreateFlagBits { - VK_SHADER_MODULE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkShaderModuleCreateFlagBits; typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = 
VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; typedef VkFlags VkPipelineCacheCreateFlags; +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; + typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, - VK_PIPELINE_CREATE_DISPATCH_BASE = 0x00000010, + VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000, + VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineCreateFlagBits; typedef VkFlags VkPipelineCreateFlags; typedef enum VkPipelineShaderStageCreateFlagBits { - VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000001, - VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000002, + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT = 0x00000001, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT = 0x00000002, + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + 
VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineShaderStageCreateFlagBits; typedef VkFlags VkPipelineShaderStageCreateFlags; @@ -1702,21 +2390,23 @@ typedef enum VkShaderStageFlagBits { VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, VK_SHADER_STAGE_ALL = 0x7FFFFFFF, - VK_SHADER_STAGE_RAYGEN_BIT_NV = 0x00000100, - VK_SHADER_STAGE_ANY_HIT_BIT_NV = 0x00000200, - VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = 0x00000400, - VK_SHADER_STAGE_MISS_BIT_NV = 0x00000800, - VK_SHADER_STAGE_INTERSECTION_BIT_NV = 0x00001000, - VK_SHADER_STAGE_CALLABLE_BIT_NV = 0x00002000, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040, VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080, + VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkShaderStageFlagBits; -typedef VkFlags VkPipelineVertexInputStateCreateFlags; -typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; -typedef VkFlags VkPipelineTessellationStateCreateFlags; -typedef VkFlags VkPipelineViewportStateCreateFlags; -typedef VkFlags VkPipelineRasterizationStateCreateFlags; typedef enum VkCullModeFlagBits { VK_CULL_MODE_NONE = 0, @@ -1726,18 +2416,25 @@ typedef enum VkCullModeFlagBits { VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkCullModeFlagBits; typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; typedef VkFlags VkPipelineMultisampleStateCreateFlags; + +typedef enum VkPipelineDepthStencilStateCreateFlagBits { + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = 0x00000001, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = 0x00000002, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineDepthStencilStateCreateFlagBits; typedef VkFlags VkPipelineDepthStencilStateCreateFlags; -typedef VkFlags VkPipelineColorBlendStateCreateFlags; -typedef enum VkColorComponentFlagBits { - VK_COLOR_COMPONENT_R_BIT = 0x00000001, - VK_COLOR_COMPONENT_G_BIT = 0x00000002, - VK_COLOR_COMPONENT_B_BIT = 0x00000004, - VK_COLOR_COMPONENT_A_BIT = 0x00000008, - VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkColorComponentFlagBits; -typedef VkFlags VkColorComponentFlags; +typedef enum VkPipelineColorBlendStateCreateFlagBits { + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = 0x00000001, + 
VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineColorBlendStateCreateFlagBits; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; typedef VkFlags VkPipelineDynamicStateCreateFlags; typedef VkFlags VkPipelineLayoutCreateFlags; typedef VkFlags VkShaderStageFlags; @@ -1749,31 +2446,24 @@ typedef enum VkSamplerCreateFlagBits { } VkSamplerCreateFlagBits; typedef VkFlags VkSamplerCreateFlags; -typedef enum VkDescriptorSetLayoutCreateFlagBits { - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = 0x00000002, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorSetLayoutCreateFlagBits; -typedef VkFlags VkDescriptorSetLayoutCreateFlags; - typedef enum VkDescriptorPoolCreateFlagBits { VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, - VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = 0x00000004, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkDescriptorPoolCreateFlagBits; typedef VkFlags VkDescriptorPoolCreateFlags; typedef VkFlags VkDescriptorPoolResetFlags; -typedef enum VkFramebufferCreateFlagBits { - VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = 0x00000001, - VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFramebufferCreateFlagBits; -typedef VkFlags VkFramebufferCreateFlags; - -typedef enum VkRenderPassCreateFlagBits { - VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkRenderPassCreateFlagBits; -typedef VkFlags VkRenderPassCreateFlags; +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = 0x00000004, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; typedef enum VkAttachmentDescriptionFlagBits { VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, @@ -1781,46 +2471,6 @@ typedef enum VkAttachmentDescriptionFlagBits { } VkAttachmentDescriptionFlagBits; typedef VkFlags VkAttachmentDescriptionFlags; -typedef enum VkSubpassDescriptionFlagBits { - VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, - VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, - VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassDescriptionFlagBits; -typedef VkFlags VkSubpassDescriptionFlags; - -typedef enum VkAccessFlagBits { - VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, - VK_ACCESS_INDEX_READ_BIT = 0x00000002, - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, - VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, - VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, - VK_ACCESS_SHADER_READ_BIT = 0x00000020, - VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, - VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, - VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, - 
VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, - VK_ACCESS_HOST_READ_BIT = 0x00002000, - VK_ACCESS_HOST_WRITE_BIT = 0x00004000, - VK_ACCESS_MEMORY_READ_BIT = 0x00008000, - VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, - VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, - VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, - VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, - VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, - VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, - VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, - VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, - VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000, - VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000, - VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000, - VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, - VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAccessFlagBits; -typedef VkFlags VkAccessFlags; - typedef enum VkDependencyFlagBits { VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, @@ -1831,6 +2481,31 @@ typedef enum VkDependencyFlagBits { } VkDependencyFlagBits; typedef VkFlags VkDependencyFlags; +typedef enum VkFramebufferCreateFlagBits { + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFramebufferCreateFlagBits; +typedef VkFlags VkFramebufferCreateFlags; + +typedef enum VkRenderPassCreateFlagBits { + VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002, + VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderPassCreateFlagBits; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004, + VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = 0x00000010, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = 0x00000020, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = 0x00000040, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + typedef enum VkCommandPoolCreateFlagBits { VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, @@ -1873,36 +2548,114 @@ typedef enum VkStencilFaceFlagBits { VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkStencilFaceFlagBits; typedef VkFlags VkStencilFaceFlags; -typedef struct VkApplicationInfo { - VkStructureType sType; - const void* pNext; - const char* pApplicationName; - uint32_t applicationVersion; - const char* pEngineName; - uint32_t engineVersion; - uint32_t apiVersion; -} VkApplicationInfo; +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; -typedef struct VkInstanceCreateInfo { - VkStructureType sType; - const void* pNext; - VkInstanceCreateFlags flags; - const VkApplicationInfo* pApplicationInfo; - uint32_t enabledLayerCount; - const char* const* ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char* const* ppEnabledExtensionNames; -} VkInstanceCreateInfo; +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t 
depth; +} VkExtent3D; -typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( - void* pUserData, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; -typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkPipelineCacheHeaderVersionOne { + uint32_t headerSize; + VkPipelineCacheHeaderVersion headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; +} VkPipelineCacheHeaderVersionOne; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( void* pUserData, - void* pOriginal, size_t size, size_t alignment, VkSystemAllocationScope allocationScope); @@ -1923,6 +2676,14 @@ typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope); +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); typedef struct VkAllocationCallbacks { void* pUserData; PFN_vkAllocationFunction pfnAllocation; @@ -1932,6 +2693,51 @@ typedef struct VkAllocationCallbacks { PFN_vkInternalFreeNotification pfnInternalFree; } VkAllocationCallbacks; +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkFormatProperties { + 
VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + typedef struct VkPhysicalDeviceFeatures { VkBool32 robustBufferAccess; VkBool32 fullDrawIndexUint32; @@ -1990,26 +2796,6 @@ typedef struct VkPhysicalDeviceFeatures { VkBool32 inheritedQueries; } VkPhysicalDeviceFeatures; -typedef struct VkFormatProperties { - VkFormatFeatureFlags linearTilingFeatures; - VkFormatFeatureFlags optimalTilingFeatures; - VkFormatFeatureFlags bufferFeatures; -} VkFormatProperties; - -typedef struct VkExtent3D { - uint32_t width; - uint32_t height; - uint32_t depth; -} VkExtent3D; - -typedef struct VkImageFormatProperties { - VkExtent3D maxExtent; - uint32_t maxMipLevels; - uint32_t maxArrayLayers; - VkSampleCountFlags sampleCounts; - VkDeviceSize maxResourceSize; -} VkImageFormatProperties; - typedef struct VkPhysicalDeviceLimits { uint32_t maxImageDimension1D; uint32_t maxImageDimension2D; @@ -2119,6 +2905,13 @@ typedef struct VkPhysicalDeviceLimits { VkDeviceSize nonCoherentAtomSize; } VkPhysicalDeviceLimits; +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + typedef struct VkPhysicalDeviceSparseProperties { VkBool32 residencyStandard2DBlockShape; VkBool32 residencyStandard2DMultisampleBlockShape; @@ -2146,24 +2939,6 @@ typedef struct VkQueueFamilyProperties { VkExtent3D minImageTransferGranularity; } VkQueueFamilyProperties; -typedef struct VkMemoryType { - VkMemoryPropertyFlags propertyFlags; - uint32_t heapIndex; -} VkMemoryType; - -typedef struct VkMemoryHeap { - VkDeviceSize size; - VkMemoryHeapFlags flags; -} VkMemoryHeap; - -typedef struct VkPhysicalDeviceMemoryProperties { - uint32_t memoryTypeCount; - VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; - uint32_t memoryHeapCount; - VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryProperties; - -typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); typedef struct VkDeviceQueueCreateInfo { VkStructureType sType; const void* pNext; @@ -2210,13 +2985,6 @@ typedef struct VkSubmitInfo { const VkSemaphore* pSignalSemaphores; } VkSubmitInfo; -typedef struct VkMemoryAllocateInfo { - VkStructureType sType; - const void* pNext; - VkDeviceSize allocationSize; - uint32_t memoryTypeIndex; -} VkMemoryAllocateInfo; - typedef struct VkMappedMemoryRange { VkStructureType sType; const void* pNext; @@ -2225,26 +2993,19 @@ typedef struct VkMappedMemoryRange { VkDeviceSize size; } VkMappedMemoryRange; +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + 
uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + typedef struct VkMemoryRequirements { VkDeviceSize size; VkDeviceSize alignment; uint32_t memoryTypeBits; } VkMemoryRequirements; -typedef struct VkSparseImageFormatProperties { - VkImageAspectFlags aspectMask; - VkExtent3D imageGranularity; - VkSparseImageFormatFlags flags; -} VkSparseImageFormatProperties; - -typedef struct VkSparseImageMemoryRequirements { - VkSparseImageFormatProperties formatProperties; - uint32_t imageMipTailFirstLod; - VkDeviceSize imageMipTailSize; - VkDeviceSize imageMipTailOffset; - VkDeviceSize imageMipTailStride; -} VkSparseImageMemoryRequirements; - typedef struct VkSparseMemoryBind { VkDeviceSize resourceOffset; VkDeviceSize size; @@ -2271,12 +3032,6 @@ typedef struct VkImageSubresource { uint32_t arrayLayer; } VkImageSubresource; -typedef struct VkOffset3D { - int32_t x; - int32_t y; - int32_t z; -} VkOffset3D; - typedef struct VkSparseImageMemoryBind { VkImageSubresource subresource; VkOffset3D offset; @@ -2307,6 +3062,20 @@ typedef struct VkBindSparseInfo { const VkSemaphore* pSignalSemaphores; } VkBindSparseInfo; +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + typedef struct VkFenceCreateInfo { VkStructureType sType; const void* pNext; @@ -2388,14 +3157,6 @@ typedef struct VkComponentMapping { VkComponentSwizzle a; } VkComponentMapping; -typedef struct VkImageSubresourceRange { - VkImageAspectFlags aspectMask; - uint32_t baseMipLevel; - uint32_t levelCount; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceRange; - typedef struct VkImageViewCreateInfo { VkStructureType sType; const void* pNext; @@ -2446,8 +3207,18 @@ typedef struct VkPipelineShaderStageCreateInfo { const VkSpecializationInfo* pSpecializationInfo; } VkPipelineShaderStageCreateInfo; -typedef struct VkVertexInputBindingDescription { - uint32_t binding; +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; uint32_t stride; VkVertexInputRate inputRate; } VkVertexInputBindingDescription; @@ -2493,21 +3264,6 @@ typedef struct VkViewport { float maxDepth; } VkViewport; -typedef struct VkOffset2D { - int32_t x; - int32_t y; -} VkOffset2D; - -typedef struct VkExtent2D { - uint32_t width; - uint32_t height; -} VkExtent2D; - -typedef struct VkRect2D { - VkOffset2D offset; - VkExtent2D extent; -} VkRect2D; - typedef struct VkPipelineViewportStateCreateInfo { VkStructureType sType; const void* pNext; @@ -2623,16 +3379,6 @@ typedef struct VkGraphicsPipelineCreateInfo { int32_t basePipelineIndex; } VkGraphicsPipelineCreateInfo; -typedef struct VkComputePipelineCreateInfo { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - VkPipelineShaderStageCreateInfo stage; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkComputePipelineCreateInfo; - typedef struct 
VkPushConstantRange { VkShaderStageFlags stageFlags; uint32_t offset; @@ -2670,21 +3416,29 @@ typedef struct VkSamplerCreateInfo { VkBool32 unnormalizedCoordinates; } VkSamplerCreateInfo; -typedef struct VkDescriptorSetLayoutBinding { - uint32_t binding; - VkDescriptorType descriptorType; - uint32_t descriptorCount; - VkShaderStageFlags stageFlags; - const VkSampler* pImmutableSamplers; -} VkDescriptorSetLayoutBinding; +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; -typedef struct VkDescriptorSetLayoutCreateInfo { - VkStructureType sType; - const void* pNext; - VkDescriptorSetLayoutCreateFlags flags; - uint32_t bindingCount; - const VkDescriptorSetLayoutBinding* pBindings; -} VkDescriptorSetLayoutCreateInfo; +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; typedef struct VkDescriptorPoolSize { VkDescriptorType type; @@ -2708,17 +3462,21 @@ typedef struct VkDescriptorSetAllocateInfo { const VkDescriptorSetLayout* pSetLayouts; } VkDescriptorSetAllocateInfo; -typedef struct VkDescriptorImageInfo { - VkSampler sampler; - VkImageView imageView; - VkImageLayout imageLayout; -} VkDescriptorImageInfo; +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; -typedef struct VkDescriptorBufferInfo { - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize range; -} VkDescriptorBufferInfo; +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; typedef struct VkWriteDescriptorSet { VkStructureType sType; @@ -2733,30 +3491,6 @@ typedef struct VkWriteDescriptorSet { const VkBufferView* pTexelBufferView; } VkWriteDescriptorSet; -typedef struct VkCopyDescriptorSet { - VkStructureType sType; - const void* pNext; - VkDescriptorSet srcSet; - uint32_t srcBinding; - uint32_t srcArrayElement; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; -} VkCopyDescriptorSet; - -typedef struct VkFramebufferCreateInfo { - VkStructureType sType; - const void* pNext; - VkFramebufferCreateFlags flags; - VkRenderPass renderPass; - uint32_t attachmentCount; - const VkImageView* pAttachments; - uint32_t width; - uint32_t height; - uint32_t layers; -} VkFramebufferCreateInfo; - typedef struct VkAttachmentDescription { VkAttachmentDescriptionFlags flags; VkFormat format; @@ -2774,6 +3508,18 @@ typedef struct VkAttachmentReference { VkImageLayout layout; } VkAttachmentReference; +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + typedef struct VkSubpassDescription { VkSubpassDescriptionFlags flags; 
VkPipelineBindPoint pipelineBindPoint; @@ -2855,21 +3601,6 @@ typedef struct VkImageSubresourceLayers { uint32_t layerCount; } VkImageSubresourceLayers; -typedef struct VkImageCopy { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageCopy; - -typedef struct VkImageBlit { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffsets[2]; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffsets[2]; -} VkImageBlit; - typedef struct VkBufferImageCopy { VkDeviceSize bufferOffset; uint32_t bufferRowLength; @@ -2907,6 +3638,21 @@ typedef struct VkClearRect { uint32_t layerCount; } VkClearRect; +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + typedef struct VkImageResolve { VkImageSubresourceLayers srcSubresource; VkOffset3D srcOffset; @@ -2915,38 +3661,6 @@ typedef struct VkImageResolve { VkExtent3D extent; } VkImageResolve; -typedef struct VkMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; -} VkMemoryBarrier; - -typedef struct VkBufferMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize size; -} VkBufferMemoryBarrier; - -typedef struct VkImageMemoryBarrier { - VkStructureType sType; - const void* pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkImageLayout oldLayout; - VkImageLayout newLayout; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkImage image; - VkImageSubresourceRange subresourceRange; -} VkImageMemoryBarrier; - typedef struct VkRenderPassBeginInfo { VkStructureType sType; const void* pNext; @@ -2957,37 +3671,6 @@ typedef struct VkRenderPassBeginInfo { const VkClearValue* pClearValues; } VkRenderPassBeginInfo; -typedef struct VkDispatchIndirectCommand { - uint32_t x; - uint32_t y; - uint32_t z; -} VkDispatchIndirectCommand; - -typedef struct VkDrawIndexedIndirectCommand { - uint32_t indexCount; - uint32_t instanceCount; - uint32_t firstIndex; - int32_t vertexOffset; - uint32_t firstInstance; -} VkDrawIndexedIndirectCommand; - -typedef struct VkDrawIndirectCommand { - uint32_t vertexCount; - uint32_t instanceCount; - uint32_t firstVertex; - uint32_t firstInstance; -} VkDrawIndirectCommand; - -typedef struct VkBaseOutStructure { - VkStructureType sType; - struct VkBaseOutStructure* pNext; -} VkBaseOutStructure; - -typedef struct VkBaseInStructure { - VkStructureType sType; - const struct VkBaseInStructure* pNext; -} VkBaseInStructure; - typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); @@ -3928,22 +4611,19 @@ VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( 
#define VK_VERSION_1_1 1 // Vulkan 1.1 version number -#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) -#define VK_MAX_DEVICE_GROUP_SIZE 32 -#define VK_LUID_SIZE 8 -#define VK_QUEUE_FAMILY_EXTERNAL (~0U-1) +#define VK_MAX_DEVICE_GROUP_SIZE 32U +#define VK_LUID_SIZE 8U +#define VK_QUEUE_FAMILY_EXTERNAL (~1U) typedef enum VkPointClippingBehavior { VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, - VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, - VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, - VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1), VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF } VkPointClippingBehavior; @@ -3952,9 +4632,6 @@ typedef enum VkTessellationDomainOrigin { VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, - VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, - VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, - VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1), VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF } VkTessellationDomainOrigin; @@ -3969,9 +4646,6 @@ typedef enum VkSamplerYcbcrModelConversion { VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1), VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF } VkSamplerYcbcrModelConversion; @@ -3980,9 +4654,6 @@ typedef enum VkSamplerYcbcrRange { VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, - VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, - VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, - VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1), VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF } VkSamplerYcbcrRange; @@ -3991,9 +4662,6 @@ typedef enum VkChromaLocation { VK_CHROMA_LOCATION_MIDPOINT = 1, VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_MIDPOINT_KHR = 
VK_CHROMA_LOCATION_MIDPOINT, - VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN, - VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT, - VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1), VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF } VkChromaLocation; @@ -4001,9 +4669,6 @@ typedef enum VkDescriptorUpdateTemplateType { VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1), VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorUpdateTemplateType; @@ -4036,7 +4701,11 @@ typedef VkFlags VkPeerMemoryFeatureFlags; typedef enum VkMemoryAllocateFlagBits { VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004, VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkMemoryAllocateFlagBits; typedef VkFlags VkMemoryAllocateFlags; @@ -4055,6 +4724,8 @@ typedef enum VkExternalMemoryHandleTypeFlagBits { VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, @@ -4119,6 +4790,8 @@ typedef enum VkExternalSemaphoreHandleTypeFlagBits { VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, @@ -4278,8 +4951,6 @@ typedef struct VkMemoryRequirements2 { VkMemoryRequirements memoryRequirements; } VkMemoryRequirements2; -typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; - typedef struct VkSparseImageMemoryRequirements2 { VkStructureType sType; void* pNext; @@ -4803,21 
+5474,1817 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( #endif -#define VK_KHR_surface 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) -#define VK_KHR_SURFACE_SPEC_VERSION 25 -#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" +#define VK_VERSION_1_2 1 +// Vulkan 1.2 version number +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 + +#define VK_MAX_DRIVER_NAME_SIZE 256U +#define VK_MAX_DRIVER_INFO_SIZE 256U + +typedef enum VkDriverId { + VK_DRIVER_ID_AMD_PROPRIETARY = 1, + VK_DRIVER_ID_AMD_OPEN_SOURCE = 2, + VK_DRIVER_ID_MESA_RADV = 3, + VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8, + VK_DRIVER_ID_ARM_PROPRIETARY = 9, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10, + VK_DRIVER_ID_GGP_PROPRIETARY = 11, + VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, + VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, + VK_DRIVER_ID_COREAVI_PROPRIETARY = 15, + VK_DRIVER_ID_JUICE_PROPRIETARY = 16, + VK_DRIVER_ID_VERISILICON_PROPRIETARY = 17, + VK_DRIVER_ID_MESA_TURNIP = 18, + VK_DRIVER_ID_MESA_V3DV = 19, + VK_DRIVER_ID_MESA_PANVK = 20, + VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21, + VK_DRIVER_ID_MESA_VENUS = 22, + VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, + VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, + VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, + VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY, + VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF +} VkDriverId; + +typedef enum VkShaderFloatControlsIndependence { + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF +} VkShaderFloatControlsIndependence; + +typedef enum VkSamplerReductionMode { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0, + VK_SAMPLER_REDUCTION_MODE_MIN = 1, + VK_SAMPLER_REDUCTION_MODE_MAX = 2, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerReductionMode; + +typedef enum VkSemaphoreType { + VK_SEMAPHORE_TYPE_BINARY = 0, + VK_SEMAPHORE_TYPE_TIMELINE = 1, + VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY, + VK_SEMAPHORE_TYPE_TIMELINE_KHR = 
VK_SEMAPHORE_TYPE_TIMELINE, + VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreType; + +typedef enum VkResolveModeFlagBits { + VK_RESOLVE_MODE_NONE = 0, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001, + VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002, + VK_RESOLVE_MODE_MIN_BIT = 0x00000004, + VK_RESOLVE_MODE_MAX_BIT = 0x00000008, + VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT, + VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT, + VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT, + VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkResolveModeFlagBits; +typedef VkFlags VkResolveModeFlags; + +typedef enum VkDescriptorBindingFlagBits { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008, + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorBindingFlagBits; +typedef VkFlags VkDescriptorBindingFlags; + +typedef enum VkSemaphoreWaitFlagBits { + VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001, + VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT, + VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreWaitFlagBits; +typedef VkFlags VkSemaphoreWaitFlags; +typedef struct VkPhysicalDeviceVulkan11Features { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; + VkBool32 protectedMemory; + VkBool32 samplerYcbcrConversion; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceVulkan11Features; -typedef enum VkColorSpaceKHR { - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, - VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, - VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, - VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, - VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, - VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, - VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, - VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, - VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, +typedef struct VkPhysicalDeviceVulkan11Properties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; + uint32_t subgroupSize; + VkShaderStageFlags subgroupSupportedStages; + VkSubgroupFeatureFlags subgroupSupportedOperations; + VkBool32 subgroupQuadOperationsInAllStages; + VkPointClippingBehavior pointClippingBehavior; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + VkBool32 protectedNoFault; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} 
VkPhysicalDeviceVulkan11Properties; + +typedef struct VkPhysicalDeviceVulkan12Features { + VkStructureType sType; + void* pNext; + VkBool32 samplerMirrorClampToEdge; + VkBool32 drawIndirectCount; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; + VkBool32 descriptorIndexing; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; + VkBool32 samplerFilterMinmax; + VkBool32 scalarBlockLayout; + VkBool32 imagelessFramebuffer; + VkBool32 uniformBufferStandardLayout; + VkBool32 shaderSubgroupExtendedTypes; + VkBool32 separateDepthStencilLayouts; + VkBool32 hostQueryReset; + VkBool32 timelineSemaphore; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; + VkBool32 shaderOutputViewportIndex; + VkBool32 shaderOutputLayer; + VkBool32 subgroupBroadcastDynamicId; +} VkPhysicalDeviceVulkan12Features; + +typedef struct VkConformanceVersion { + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; +} VkConformanceVersion; + +typedef struct VkPhysicalDeviceVulkan12Properties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 
shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; + uint64_t maxTimelineSemaphoreValueDifference; + VkSampleCountFlags framebufferIntegerColorSampleCounts; +} VkPhysicalDeviceVulkan12Properties; + +typedef struct VkImageFormatListCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfo; + +typedef struct VkAttachmentDescription2 { + VkStructureType sType; + const void* pNext; + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription2; + +typedef struct VkAttachmentReference2 { + VkStructureType sType; + const void* pNext; + uint32_t attachment; + VkImageLayout layout; + VkImageAspectFlags aspectMask; +} VkAttachmentReference2; + +typedef struct VkSubpassDescription2 { + VkStructureType sType; + const void* pNext; + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const VkAttachmentReference2* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference2* pColorAttachments; + const VkAttachmentReference2* pResolveAttachments; + const VkAttachmentReference2* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription2; + +typedef struct VkSubpassDependency2 { + VkStructureType sType; + const void* pNext; + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; + int32_t viewOffset; +} VkSubpassDependency2; + +typedef struct VkRenderPassCreateInfo2 { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription2* pAttachments; + uint32_t subpassCount; + const 
VkSubpassDescription2* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency2* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; +} VkRenderPassCreateInfo2; + +typedef struct VkSubpassBeginInfo { + VkStructureType sType; + const void* pNext; + VkSubpassContents contents; +} VkSubpassBeginInfo; + +typedef struct VkSubpassEndInfo { + VkStructureType sType; + const void* pNext; +} VkSubpassEndInfo; + +typedef struct VkPhysicalDevice8BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; +} VkPhysicalDevice8BitStorageFeatures; + +typedef struct VkPhysicalDeviceDriverProperties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; +} VkPhysicalDeviceDriverProperties; + +typedef struct VkPhysicalDeviceShaderAtomicInt64Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; +} VkPhysicalDeviceShaderAtomicInt64Features; + +typedef struct VkPhysicalDeviceShaderFloat16Int8Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; +} VkPhysicalDeviceShaderFloat16Int8Features; + +typedef struct VkPhysicalDeviceFloatControlsProperties { + VkStructureType sType; + void* pNext; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; +} VkPhysicalDeviceFloatControlsProperties; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlags* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfo; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 
descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeatures; + +typedef struct VkPhysicalDeviceDescriptorIndexingProperties { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingProperties; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfo; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupport; + +typedef struct VkSubpassDescriptionDepthStencilResolve { + VkStructureType sType; + const void* pNext; + VkResolveModeFlagBits depthResolveMode; + VkResolveModeFlagBits stencilResolveMode; + const VkAttachmentReference2* pDepthStencilResolveAttachment; +} VkSubpassDescriptionDepthStencilResolve; + +typedef struct VkPhysicalDeviceDepthStencilResolveProperties { + VkStructureType sType; + void* pNext; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; +} VkPhysicalDeviceDepthStencilResolveProperties; + +typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 scalarBlockLayout; +} VkPhysicalDeviceScalarBlockLayoutFeatures; + +typedef struct VkImageStencilUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags stencilUsage; +} VkImageStencilUsageCreateInfo; + +typedef struct VkSamplerReductionModeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerReductionMode reductionMode; +} VkSamplerReductionModeCreateInfo; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 
filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxProperties; + +typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; +} VkPhysicalDeviceVulkanMemoryModelFeatures; + +typedef struct VkPhysicalDeviceImagelessFramebufferFeatures { + VkStructureType sType; + void* pNext; + VkBool32 imagelessFramebuffer; +} VkPhysicalDeviceImagelessFramebufferFeatures; + +typedef struct VkFramebufferAttachmentImageInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageUsageFlags usage; + uint32_t width; + uint32_t height; + uint32_t layerCount; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkFramebufferAttachmentImageInfo; + +typedef struct VkFramebufferAttachmentsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentImageInfoCount; + const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos; +} VkFramebufferAttachmentsCreateInfo; + +typedef struct VkRenderPassAttachmentBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkImageView* pAttachments; +} VkRenderPassAttachmentBeginInfo; + +typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 uniformBufferStandardLayout; +} VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + +typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupExtendedTypes; +} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures { + VkStructureType sType; + void* pNext; + VkBool32 separateDepthStencilLayouts; +} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +typedef struct VkAttachmentReferenceStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilLayout; +} VkAttachmentReferenceStencilLayout; + +typedef struct VkAttachmentDescriptionStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilInitialLayout; + VkImageLayout stencilFinalLayout; +} VkAttachmentDescriptionStencilLayout; + +typedef struct VkPhysicalDeviceHostQueryResetFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostQueryReset; +} VkPhysicalDeviceHostQueryResetFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures { + VkStructureType sType; + void* pNext; + VkBool32 timelineSemaphore; +} VkPhysicalDeviceTimelineSemaphoreFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreProperties { + VkStructureType sType; + void* pNext; + uint64_t maxTimelineSemaphoreValueDifference; +} VkPhysicalDeviceTimelineSemaphoreProperties; + +typedef struct VkSemaphoreTypeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreType semaphoreType; + uint64_t initialValue; +} VkSemaphoreTypeCreateInfo; + +typedef struct VkTimelineSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValueCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValueCount; + const uint64_t* pSignalSemaphoreValues; +} VkTimelineSemaphoreSubmitInfo; + +typedef struct VkSemaphoreWaitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreWaitFlags flags; + uint32_t semaphoreCount; + const VkSemaphore* pSemaphores; + const uint64_t* pValues; +} VkSemaphoreWaitInfo; + +typedef 
struct VkSemaphoreSignalInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; +} VkSemaphoreSignalInfo; + +typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeatures; + +typedef struct VkBufferDeviceAddressInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferDeviceAddressInfo; + +typedef struct VkBufferOpaqueCaptureAddressCreateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkBufferOpaqueCaptureAddressCreateInfo; + +typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkMemoryOpaqueCaptureAddressAllocateInfo; + +typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkDeviceMemoryOpaqueCaptureAddressInfo; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t 
maxDrawCount, + uint32_t stride); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkResetQueryPool( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +#define VK_VERSION_1_3 1 +// Vulkan 1.3 version number +#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0 + +typedef uint64_t VkFlags64; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlot) + +typedef enum VkPipelineCreationFeedbackFlagBits { + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT = 0x00000001, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = 0x00000002, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = 0x00000004, + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT, + VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreationFeedbackFlagBits; +typedef VkFlags VkPipelineCreationFeedbackFlags; + +typedef enum VkToolPurposeFlagBits { + VK_TOOL_PURPOSE_VALIDATION_BIT = 0x00000001, + VK_TOOL_PURPOSE_PROFILING_BIT = 0x00000002, + VK_TOOL_PURPOSE_TRACING_BIT = 0x00000004, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT = 0x00000008, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT = 0x00000010, + VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020, + VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040, + VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = VK_TOOL_PURPOSE_VALIDATION_BIT, + VK_TOOL_PURPOSE_PROFILING_BIT_EXT = VK_TOOL_PURPOSE_PROFILING_BIT, + VK_TOOL_PURPOSE_TRACING_BIT_EXT = VK_TOOL_PURPOSE_TRACING_BIT, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT, + VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkToolPurposeFlagBits; +typedef VkFlags 
VkToolPurposeFlags; + +typedef enum VkPrivateDataSlotCreateFlagBits { + VK_PRIVATE_DATA_SLOT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPrivateDataSlotCreateFlagBits; +typedef VkFlags VkPrivateDataSlotCreateFlags; +typedef VkFlags64 VkPipelineStageFlags2; + +// Flag bits for VkPipelineStageFlagBits2 +typedef VkFlags64 VkPipelineStageFlagBits2; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 
0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +#endif +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 
0x10000000000ULL; + +typedef VkFlags64 VkAccessFlags2; + +// Flag bits for VkAccessFlagBits2 +typedef VkFlags64 VkAccessFlagBits2; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 
0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; +#endif +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL; + + +typedef enum VkSubmitFlagBits { + VK_SUBMIT_PROTECTED_BIT = 0x00000001, + VK_SUBMIT_PROTECTED_BIT_KHR = VK_SUBMIT_PROTECTED_BIT, + VK_SUBMIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubmitFlagBits; +typedef VkFlags VkSubmitFlags; + +typedef enum VkRenderingFlagBits { + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001, + VK_RENDERING_SUSPENDING_BIT = 0x00000002, + VK_RENDERING_RESUMING_BIT = 0x00000004, + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, + VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, + VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, + VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderingFlagBits; +typedef VkFlags VkRenderingFlags; +typedef VkFlags64 VkFormatFeatureFlags2; + +// Flag bits for VkFormatFeatureFlagBits2 +typedef VkFlags64 VkFormatFeatureFlagBits2; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 
VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL; +static const 
VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL; +#endif +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL; +#endif +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL; + +typedef struct VkPhysicalDeviceVulkan13Features { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; + VkBool32 pipelineCreationCacheControl; + VkBool32 privateData; + VkBool32 shaderDemoteToHelperInvocation; + VkBool32 shaderTerminateInvocation; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; + VkBool32 synchronization2; + VkBool32 textureCompressionASTC_HDR; + VkBool32 shaderZeroInitializeWorkgroupMemory; + VkBool32 dynamicRendering; + VkBool32 shaderIntegerDotProduct; + VkBool32 maintenance4; +} VkPhysicalDeviceVulkan13Features; + +typedef struct VkPhysicalDeviceVulkan13Properties { + 
VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; + uint32_t maxInlineUniformTotalSize; + VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceVulkan13Properties; + +typedef struct VkPipelineCreationFeedback { + VkPipelineCreationFeedbackFlags flags; + uint64_t duration; +} VkPipelineCreationFeedback; + +typedef struct VkPipelineCreationFeedbackCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreationFeedback* pPipelineCreationFeedback; + uint32_t pipelineStageCreationFeedbackCount; + VkPipelineCreationFeedback* pPipelineStageCreationFeedbacks; +} VkPipelineCreationFeedbackCreateInfo; + +typedef struct VkPhysicalDeviceShaderTerminateInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderTerminateInvocation; +} VkPhysicalDeviceShaderTerminateInvocationFeatures; + +typedef struct VkPhysicalDeviceToolProperties { + VkStructureType sType; + 
void* pNext; + char name[VK_MAX_EXTENSION_NAME_SIZE]; + char version[VK_MAX_EXTENSION_NAME_SIZE]; + VkToolPurposeFlags purposes; + char description[VK_MAX_DESCRIPTION_SIZE]; + char layer[VK_MAX_EXTENSION_NAME_SIZE]; +} VkPhysicalDeviceToolProperties; + +typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDemoteToHelperInvocation; +} VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +typedef struct VkPhysicalDevicePrivateDataFeatures { + VkStructureType sType; + void* pNext; + VkBool32 privateData; +} VkPhysicalDevicePrivateDataFeatures; + +typedef struct VkDevicePrivateDataCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t privateDataSlotRequestCount; +} VkDevicePrivateDataCreateInfo; + +typedef struct VkPrivateDataSlotCreateInfo { + VkStructureType sType; + const void* pNext; + VkPrivateDataSlotCreateFlags flags; +} VkPrivateDataSlotCreateInfo; + +typedef struct VkPhysicalDevicePipelineCreationCacheControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeatures; + +typedef struct VkMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; +} VkMemoryBarrier2; + +typedef struct VkBufferMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier2; + +typedef struct VkImageMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier2; + +typedef struct VkDependencyInfo { + VkStructureType sType; + const void* pNext; + VkDependencyFlags dependencyFlags; + uint32_t memoryBarrierCount; + const VkMemoryBarrier2* pMemoryBarriers; + uint32_t bufferMemoryBarrierCount; + const VkBufferMemoryBarrier2* pBufferMemoryBarriers; + uint32_t imageMemoryBarrierCount; + const VkImageMemoryBarrier2* pImageMemoryBarriers; +} VkDependencyInfo; + +typedef struct VkSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; + VkPipelineStageFlags2 stageMask; + uint32_t deviceIndex; +} VkSemaphoreSubmitInfo; + +typedef struct VkCommandBufferSubmitInfo { + VkStructureType sType; + const void* pNext; + VkCommandBuffer commandBuffer; + uint32_t deviceMask; +} VkCommandBufferSubmitInfo; + +typedef struct VkSubmitInfo2 { + VkStructureType sType; + const void* pNext; + VkSubmitFlags flags; + uint32_t waitSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pWaitSemaphoreInfos; + uint32_t commandBufferInfoCount; + const VkCommandBufferSubmitInfo* pCommandBufferInfos; + uint32_t signalSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pSignalSemaphoreInfos; +} VkSubmitInfo2; + +typedef struct VkPhysicalDeviceSynchronization2Features { + VkStructureType sType; + void* pNext; + VkBool32 synchronization2; +} 
VkPhysicalDeviceSynchronization2Features; + +typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderZeroInitializeWorkgroupMemory; +} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + +typedef struct VkPhysicalDeviceImageRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeatures; + +typedef struct VkBufferCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2; + +typedef struct VkCopyBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2* pRegions; +} VkCopyBufferInfo2; + +typedef struct VkImageCopy2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2; + +typedef struct VkCopyImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageInfo2; + +typedef struct VkBufferImageCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2; + +typedef struct VkCopyBufferToImageInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyBufferToImageInfo2; + +typedef struct VkCopyImageToBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyImageToBufferInfo2; + +typedef struct VkImageBlit2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2; + +typedef struct VkBlitImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2* pRegions; + VkFilter filter; +} VkBlitImageInfo2; + +typedef struct VkImageResolve2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2; + +typedef struct VkResolveImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2* pRegions; +} VkResolveImageInfo2; + +typedef struct VkPhysicalDeviceSubgroupSizeControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; +} VkPhysicalDeviceSubgroupSizeControlFeatures; + +typedef struct VkPhysicalDeviceSubgroupSizeControlProperties { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t 
maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; +} VkPhysicalDeviceSubgroupSizeControlProperties; + +typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo { + VkStructureType sType; + void* pNext; + uint32_t requiredSubgroupSize; +} VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + +typedef struct VkPhysicalDeviceInlineUniformBlockFeatures { + VkStructureType sType; + void* pNext; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; +} VkPhysicalDeviceInlineUniformBlockFeatures; + +typedef struct VkPhysicalDeviceInlineUniformBlockProperties { + VkStructureType sType; + void* pNext; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; +} VkPhysicalDeviceInlineUniformBlockProperties; + +typedef struct VkWriteDescriptorSetInlineUniformBlock { + VkStructureType sType; + const void* pNext; + uint32_t dataSize; + const void* pData; +} VkWriteDescriptorSetInlineUniformBlock; + +typedef struct VkDescriptorPoolInlineUniformBlockCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t maxInlineUniformBlockBindings; +} VkDescriptorPoolInlineUniformBlockCreateInfo; + +typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures { + VkStructureType sType; + void* pNext; + VkBool32 textureCompressionASTC_HDR; +} VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + +typedef struct VkRenderingAttachmentInfo { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkResolveModeFlagBits resolveMode; + VkImageView resolveImageView; + VkImageLayout resolveImageLayout; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkClearValue clearValue; +} VkRenderingAttachmentInfo; + +typedef struct VkRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + VkRect2D renderArea; + uint32_t layerCount; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkRenderingAttachmentInfo* pColorAttachments; + const VkRenderingAttachmentInfo* pDepthAttachment; + const VkRenderingAttachmentInfo* pStencilAttachment; +} VkRenderingInfo; + +typedef struct VkPipelineRenderingCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkPipelineRenderingCreateInfo; + +typedef struct VkPhysicalDeviceDynamicRenderingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRendering; +} VkPhysicalDeviceDynamicRenderingFeatures; + +typedef struct VkCommandBufferInheritanceRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; + VkSampleCountFlagBits rasterizationSamples; +} VkCommandBufferInheritanceRenderingInfo; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerDotProduct; +} VkPhysicalDeviceShaderIntegerDotProductFeatures; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductProperties { + VkStructureType sType; + void* pNext; + 
VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; +} VkPhysicalDeviceShaderIntegerDotProductProperties; + +typedef struct VkPhysicalDeviceTexelBufferAlignmentProperties { + VkStructureType sType; + void* pNext; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; +} VkPhysicalDeviceTexelBufferAlignmentProperties; + +typedef struct VkFormatProperties3 { + VkStructureType sType; + void* pNext; + VkFormatFeatureFlags2 linearTilingFeatures; + VkFormatFeatureFlags2 optimalTilingFeatures; + VkFormatFeatureFlags2 bufferFeatures; +} VkFormatProperties3; + +typedef struct VkPhysicalDeviceMaintenance4Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance4; +} VkPhysicalDeviceMaintenance4Features; + +typedef struct VkPhysicalDeviceMaintenance4Properties { + VkStructureType sType; + void* pNext; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceMaintenance4Properties; + +typedef struct VkDeviceBufferMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkBufferCreateInfo* pCreateInfo; +} VkDeviceBufferMemoryRequirements; + +typedef struct VkDeviceImageMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + VkImageAspectFlagBits planeAspect; +} VkDeviceImageMemoryRequirements; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolProperties)(VkPhysicalDevice physicalDevice, uint32_t* 
pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlot)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlot)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRendering)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRendering)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdSetCullMode)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFace)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopology)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCount)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCount)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef 
void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnable)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOp)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnable)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOp)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnable)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnable)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirements)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + 
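/*
 * Illustrative sketch, not part of the vendored header diff above or of the
 * Dolphin changes in this patch: one plausible use of the synchronization2
 * structures and the vkCmdPipelineBarrier2 entry point declared earlier in
 * this header. It assumes a command buffer in the recording state and an
 * image last written as a color attachment; the helper name and parameters
 * are hypothetical. Compiles against <vulkan/vulkan.h> with Vulkan 1.3.
 */
static void TransitionToShaderRead(VkCommandBuffer cmd, VkImage image)
{
    /* VkImageMemoryBarrier2 carries the stage masks that used to be separate
       arguments of vkCmdPipelineBarrier, using the 64-bit *_2 flag types. */
    VkImageMemoryBarrier2 barrier = {0};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2;
    barrier.srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
    barrier.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
    barrier.dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT;
    barrier.dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT;
    barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.layerCount = 1;

    /* VkDependencyInfo bundles memory, buffer and image barriers into one call. */
    VkDependencyInfo dep = {0};
    dep.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep.imageMemoryBarrierCount = 1;
    dep.pImageMemoryBarriers = &barrier;

    vkCmdPipelineBarrier2(cmd, &dep);
}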
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + 
VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, @@ -4827,1361 +7294,2647 @@ typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, - VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF } VkColorSpaceKHR; -typedef enum VkPresentModeKHR { - VK_PRESENT_MODE_IMMEDIATE_KHR = 0, - VK_PRESENT_MODE_MAILBOX_KHR = 1, - VK_PRESENT_MODE_FIFO_KHR = 2, - VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, - VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, - VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, - VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, - VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), - VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPresentModeKHR; +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; +typedef struct 
VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + + +#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t 
minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const 
VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + + +#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) +#define VK_KHR_DISPLAY_SPEC_VERSION 23 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t 
planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + 
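/*
 * Illustrative sketch, not part of the vendored header diff: a minimal
 * acquire/present step built on the VK_KHR_swapchain entry points declared
 * above. It assumes the device, swapchain, queue and semaphores already exist
 * and that rendering work signalling renderDone is submitted elsewhere; the
 * helper name and parameters are hypothetical. Compiles against
 * <vulkan/vulkan.h> with VK_KHR_swapchain available.
 */
static VkResult PresentOneFrame(VkDevice device, VkQueue queue,
                                VkSwapchainKHR swapchain,
                                VkSemaphore imageAvailable,
                                VkSemaphore renderDone)
{
    uint32_t imageIndex = 0;
    /* Block until an image is available; imageAvailable is signalled when the
       acquired image can safely be rendered to. */
    VkResult res = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
                                         imageAvailable, VK_NULL_HANDLE,
                                         &imageIndex);
    if (res != VK_SUCCESS && res != VK_SUBOPTIMAL_KHR)
        return res; /* e.g. VK_ERROR_OUT_OF_DATE_KHR: recreate the swapchain */

    /* ... the caller's rendering to the acquired image would be submitted
       here, waiting on imageAvailable and signalling renderDone ... */

    VkPresentInfoKHR present = {0};
    present.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    present.waitSemaphoreCount = 1;
    present.pWaitSemaphores = &renderDone;
    present.swapchainCount = 1;
    present.pSwapchains = &swapchain;
    present.pImageIndices = &imageIndex;
    return vkQueuePresentKHR(queue, &present);
}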
+#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + + +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +#define VK_KHR_dynamic_rendering 1 +#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" +typedef VkRenderingFlags VkRenderingFlagsKHR; + +typedef VkRenderingFlagBits VkRenderingFlagBitsKHR; + +typedef VkRenderingInfo VkRenderingInfoKHR; + +typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR; + +typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR; + +typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR; + +typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; + +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( + VkCommandBuffer commandBuffer); +#endif + + +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 
VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + + +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo 
VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + + +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" +#define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + + +#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + + +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef 
VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + + +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif -typedef enum VkSurfaceTransformFlagBitsKHR { - VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, - VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, - VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, - VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, - VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, - VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} 
VkSurfaceTransformFlagBitsKHR; -typedef VkFlags VkSurfaceTransformFlagsKHR; -typedef enum VkCompositeAlphaFlagBitsKHR { - VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, - VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, - VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, - VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkCompositeAlphaFlagBitsKHR; -typedef VkFlags VkCompositeAlphaFlagsKHR; -typedef struct VkSurfaceCapabilitiesKHR { - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; -} VkSurfaceCapabilitiesKHR; +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + + +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* 
pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + + +#define VK_KHR_shader_float16_int8 1 +#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; + +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; + + + +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR 
*PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + + +#define VK_KHR_imageless_framebuffer 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" +typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; + +typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; + +typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; + +typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; + + + +#define VK_KHR_create_renderpass2 1 +#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" +typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; + +typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; + +typedef VkAttachmentReference2 VkAttachmentReference2KHR; + +typedef VkSubpassDescription2 VkSubpassDescription2KHR; + +typedef VkSubpassDependency2 VkSubpassDependency2KHR; + +typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; + +typedef VkSubpassEndInfo VkSubpassEndInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); +#endif -typedef struct VkSurfaceFormatKHR { - 
VkFormat format; - VkColorSpaceKHR colorSpace; -} VkSurfaceFormatKHR; -typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( - VkInstance instance, - VkSurfaceKHR surface, - const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32* pSupported); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pPresentModeCount, - VkPresentModeKHR* pPresentModes); + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + 
VkExternalFenceProperties* pExternalFenceProperties); #endif -#define VK_KHR_swapchain 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) -#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 -#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" +typedef VkFenceImportFlags VkFenceImportFlagsKHR; -typedef enum VkSwapchainCreateFlagBitsKHR { - VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, - VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, - VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, - VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSwapchainCreateFlagBitsKHR; -typedef VkFlags VkSwapchainCreateFlagsKHR; +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; -typedef enum VkDeviceGroupPresentModeFlagBitsKHR { - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, - VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, - VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, - VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDeviceGroupPresentModeFlagBitsKHR; -typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; -typedef struct VkSwapchainCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkSwapchainCreateFlagsKHR flags; - VkSurfaceKHR surface; - uint32_t minImageCount; - VkFormat imageFormat; - VkColorSpaceKHR imageColorSpace; - VkExtent2D imageExtent; - uint32_t imageArrayLayers; - VkImageUsageFlags imageUsage; - VkSharingMode imageSharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - VkSurfaceTransformFlagBitsKHR preTransform; - VkCompositeAlphaFlagBitsKHR compositeAlpha; - VkPresentModeKHR presentMode; - VkBool32 clipped; - VkSwapchainKHR oldSwapchain; -} VkSwapchainCreateInfoKHR; +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; -typedef struct VkPresentInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - uint32_t swapchainCount; - const VkSwapchainKHR* pSwapchains; - const uint32_t* pImageIndices; - VkResult* pResults; -} VkPresentInfoKHR; -typedef struct VkImageSwapchainCreateInfoKHR { + +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +#define VK_KHR_performance_query 1 +#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define 
VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" + +typedef enum VkPerformanceCounterUnitKHR { + VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, + VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, + VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, + VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, + VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, + VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, + VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, + VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, + VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, + VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterUnitKHR; + +typedef enum VkPerformanceCounterScopeKHR { + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, + VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterScopeKHR; + +typedef enum VkPerformanceCounterStorageKHR { + VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, + VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, + VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, + VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, + VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterStorageKHR; + +typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterDescriptionFlagBitsKHR; +typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; + +typedef enum VkAcquireProfilingLockFlagBitsKHR { + VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAcquireProfilingLockFlagBitsKHR; +typedef VkFlags VkAcquireProfilingLockFlagsKHR; +typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; -} VkImageSwapchainCreateInfoKHR; + void* pNext; + VkBool32 performanceCounterQueryPools; + VkBool32 performanceCounterMultipleQueryPools; +} VkPhysicalDevicePerformanceQueryFeaturesKHR; -typedef struct VkBindImageMemorySwapchainInfoKHR { +typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; - uint32_t imageIndex; -} VkBindImageMemorySwapchainInfoKHR; + void* pNext; + VkBool32 allowCommandBufferQueryCopies; +} VkPhysicalDevicePerformanceQueryPropertiesKHR; -typedef struct VkAcquireNextImageInfoKHR { +typedef struct VkPerformanceCounterKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterUnitKHR unit; + VkPerformanceCounterScopeKHR scope; + VkPerformanceCounterStorageKHR storage; + uint8_t uuid[VK_UUID_SIZE]; +} VkPerformanceCounterKHR; + +typedef struct 
VkPerformanceCounterDescriptionKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterDescriptionFlagsKHR flags; + char name[VK_MAX_DESCRIPTION_SIZE]; + char category[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkPerformanceCounterDescriptionKHR; + +typedef struct VkQueryPoolPerformanceCreateInfoKHR { VkStructureType sType; const void* pNext; - VkSwapchainKHR swapchain; - uint64_t timeout; - VkSemaphore semaphore; - VkFence fence; - uint32_t deviceMask; -} VkAcquireNextImageInfoKHR; + uint32_t queueFamilyIndex; + uint32_t counterIndexCount; + const uint32_t* pCounterIndices; +} VkQueryPoolPerformanceCreateInfoKHR; + +typedef union VkPerformanceCounterResultKHR { + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; +} VkPerformanceCounterResultKHR; + +typedef struct VkAcquireProfilingLockInfoKHR { + VkStructureType sType; + const void* pNext; + VkAcquireProfilingLockFlagsKHR flags; + uint64_t timeout; +} VkAcquireProfilingLockInfoKHR; -typedef struct VkDeviceGroupPresentCapabilitiesKHR { - VkStructureType sType; - const void* pNext; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; - VkDeviceGroupPresentModeFlagsKHR modes; -} VkDeviceGroupPresentCapabilitiesKHR; +typedef struct VkPerformanceQuerySubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t counterPassIndex; +} VkPerformanceQuerySubmitInfoKHR; -typedef struct VkDeviceGroupPresentInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const uint32_t* pDeviceMasks; - VkDeviceGroupPresentModeFlagBitsKHR mode; -} VkDeviceGroupPresentInfoKHR; +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); -typedef struct VkDeviceGroupSwapchainCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceGroupPresentModeFlagsKHR modes; -} VkDeviceGroupSwapchainCreateInfoKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); -typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); -typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); -typedef VkResult (VKAPI_PTR 
*PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( VkDevice device, - const VkSwapchainCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchain); + const VkAcquireProfilingLockInfoKHR* pInfo); -VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( - VkDevice device, - VkSwapchainKHR swapchain, - const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( + VkDevice device); +#endif -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pSwapchainImageCount, - VkImage* pSwapchainImages); -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint64_t timeout, - VkSemaphore semaphore, - VkFence fence, - uint32_t* pImageIndex); +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" +#define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; -VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( - VkQueue queue, - const VkPresentInfoKHR* pPresentInfo); +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( - VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef struct 
VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( - VkDevice device, - VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pRectCount, - VkRect2D* pRects); + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( - VkDevice device, - const VkAcquireNextImageInfoKHR* pAcquireInfo, - uint32_t* pImageIndex); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); #endif -#define VK_KHR_display 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) -#define VK_KHR_DISPLAY_SPEC_VERSION 23 -#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; -typedef enum VkDisplayPlaneAlphaFlagBitsKHR { - VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, - VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDisplayPlaneAlphaFlagBitsKHR; -typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; -typedef VkFlags VkDisplayModeCreateFlagsKHR; -typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; -typedef struct VkDisplayPropertiesKHR { - VkDisplayKHR display; - const char* displayName; - VkExtent2D physicalDimensions; - VkExtent2D physicalResolution; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkBool32 planeReorderPossible; - VkBool32 persistentContent; -} VkDisplayPropertiesKHR; +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; -typedef struct VkDisplayModeParametersKHR { - VkExtent2D visibleRegion; - uint32_t refreshRate; -} VkDisplayModeParametersKHR; -typedef struct VkDisplayModePropertiesKHR { - VkDisplayModeKHR displayMode; - VkDisplayModeParametersKHR parameters; -} VkDisplayModePropertiesKHR; -typedef struct VkDisplayModeCreateInfoKHR { +#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} 
VkDisplayProperties2KHR; + +typedef struct VkDisplayPlaneProperties2KHR { VkStructureType sType; - const void* pNext; - VkDisplayModeCreateFlagsKHR flags; - VkDisplayModeParametersKHR parameters; -} VkDisplayModeCreateInfoKHR; + void* pNext; + VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; -typedef struct VkDisplayPlaneCapabilitiesKHR { - VkDisplayPlaneAlphaFlagsKHR supportedAlpha; - VkOffset2D minSrcPosition; - VkOffset2D maxSrcPosition; - VkExtent2D minSrcExtent; - VkExtent2D maxSrcExtent; - VkOffset2D minDstPosition; - VkOffset2D maxDstPosition; - VkExtent2D minDstExtent; - VkExtent2D maxDstExtent; -} VkDisplayPlaneCapabilitiesKHR; +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; -typedef struct VkDisplayPlanePropertiesKHR { - VkDisplayKHR currentDisplay; - uint32_t currentStackIndex; -} VkDisplayPlanePropertiesKHR; +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; -typedef struct VkDisplaySurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplaySurfaceCreateFlagsKHR flags; - VkDisplayModeKHR displayMode; - uint32_t planeIndex; - uint32_t planeStackIndex; - VkSurfaceTransformFlagBitsKHR transform; - float globalAlpha; - VkDisplayPlaneAlphaFlagBitsKHR alphaMode; - VkExtent2D imageExtent; -} VkDisplaySurfaceCreateInfoKHR; +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, 
VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPropertiesKHR* pProperties); + VkDisplayProperties2KHR* pProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPlanePropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( - VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t* pDisplayCount, - VkDisplayKHR* pDisplays); + VkDisplayPlaneProperties2KHR* pProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, - VkDisplayModePropertiesKHR* pProperties); + VkDisplayModeProperties2KHR* pProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDisplayModeKHR* pMode); + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR* pCapabilities); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( - VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const 
VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); #endif -#define VK_KHR_display_swapchain 1 -#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 -#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" -typedef struct VkDisplayPresentInfoKHR { - VkStructureType sType; - const void* pNext; - VkRect2D srcRect; - VkRect2D dstRect; - VkBool32 persistent; -} VkDisplayPresentInfoKHR; +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" +typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; + + + +#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); +typedef VkChromaLocation VkChromaLocationKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchains); -#endif +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; -#define VK_KHR_sampler_mirror_clamp_to_edge 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; -#define VK_KHR_multiview 1 -#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 -#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" -typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; -typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; +typedef 
VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; -typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif -#define VK_KHR_get_physical_device_properties2 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" -typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; -typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; -typedef VkFormatProperties2 VkFormatProperties2KHR; +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; -typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); -typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); -typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif -typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; -typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; +#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" +#define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; -typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); -typedef void (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2* pFeatures); +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2* pProperties); -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2* pFormatProperties); +#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, - VkImageFormatProperties2* pImageFormatProperties); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2* pQueueFamilyProperties); +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2* pMemoryProperties); -VKAPI_ATTR void VKAPI_CALL 
vkGetPhysicalDeviceSparseImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties2* pProperties); -#endif +#define VK_KHR_shader_subgroup_extended_types 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" +typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; -#define VK_KHR_device_group 1 -#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 -#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" -typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; -typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" +typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; -typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; -typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; -typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; +#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" +typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; -typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; -typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; -typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; +#define VK_KHR_shader_clock 1 +#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 +#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" +typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupClock; + VkBool32 shaderDeviceClock; +} VkPhysicalDeviceShaderClockFeaturesKHR; + + + +#define VK_KHR_global_priority 1 +#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR 16U +#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 +#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" + +typedef enum VkQueueGlobalPriorityKHR { + VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024, + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueueGlobalPriorityKHR; +typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityKHR globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfoKHR; -typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; -typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; +typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + 
VkQueueGlobalPriorityKHR priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR]; +} VkQueueFamilyGlobalPriorityPropertiesKHR; -typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; -typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); -typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( - VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +#define VK_KHR_driver_properties 1 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE +#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE +typedef VkDriverId VkDriverIdKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( - VkCommandBuffer commandBuffer, - uint32_t deviceMask); +typedef VkConformanceVersion VkConformanceVersionKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( - VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); -#endif +typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; -#define VK_KHR_shader_draw_parameters 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" +#define VK_KHR_shader_float_controls 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 +#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; -#define VK_KHR_maintenance1 1 -#define VK_KHR_MAINTENANCE1_SPEC_VERSION 2 -#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" -typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; +typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; -typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolTrimFlags flags); -#endif +#define VK_KHR_depth_stencil_resolve 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; -#define VK_KHR_device_group_creation 1 -#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 -#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" -#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE -typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; +typedef VkResolveModeFlags VkResolveModeFlagsKHR; -typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; +typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; 
-typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( - VkInstance instance, - uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); -#endif -#define VK_KHR_external_memory_capabilities 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" -#define VK_LUID_SIZE_KHR VK_LUID_SIZE -typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; +#define VK_KHR_swapchain_mutable_format 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" -typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; -typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; +#define VK_KHR_timeline_semaphore 1 +#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 +#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" +typedef VkSemaphoreType VkSemaphoreTypeKHR; -typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; +typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; -typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; +typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; -typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; +typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; -typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; +typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; -typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; +typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; -typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; +typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; -typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; +typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, - VkExternalBufferProperties* pExternalBufferProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( + 
VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); #endif -#define VK_KHR_external_memory 1 -#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" -#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL -typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" +typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; -typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; -typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; +#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" +typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; -#define VK_KHR_external_memory_fd 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" -typedef struct VkImportMemoryFdInfoKHR { + +#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" + +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { VkStructureType sType; const void* pNext; - VkExternalMemoryHandleTypeFlagBits handleType; - int fd; -} VkImportMemoryFdInfoKHR; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; -typedef struct VkMemoryFdPropertiesKHR { +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { VkStructureType sType; void* pNext; - uint32_t memoryTypeBits; -} VkMemoryFdPropertiesKHR; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; -typedef struct VkMemoryGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBits handleType; -} VkMemoryGetFdInfoKHR; +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 
layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( - VkDevice device, - const VkMemoryGetFdInfoKHR* pGetFdInfo, - int* pFd); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - int fd, - VkMemoryFdPropertiesKHR* pMemoryFdProperties); +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( + VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); #endif -#define VK_KHR_external_semaphore_capabilities 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" -typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; +#define VK_KHR_spirv_1_4 1 +#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 +#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" -typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; -typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; +#define VK_KHR_surface_protected_capabilities 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" +typedef struct VkSurfaceProtectedCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + VkBool32 supportsProtected; +} VkSurfaceProtectedCapabilitiesKHR; -typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; -typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; -typedef VkExternalSemaphoreProperties 
VkExternalSemaphorePropertiesKHR; +#define VK_KHR_separate_depth_stencil_layouts 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" +typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + +typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; + +typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + + +#define VK_KHR_present_wait 1 +#define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 +#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" +typedef struct VkPhysicalDevicePresentWaitFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentWait; +} VkPhysicalDevicePresentWaitFeaturesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, - VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout); #endif -#define VK_KHR_external_semaphore 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" -typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; +#define VK_KHR_uniform_buffer_standard_layout 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" +typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; -typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; -typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; +#define VK_KHR_buffer_device_address 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" +typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; -#define VK_KHR_external_semaphore_fd 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" -typedef struct VkImportSemaphoreFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlags flags; - VkExternalSemaphoreHandleTypeFlagBits handleType; - int fd; -} VkImportSemaphoreFdInfoKHR; +typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; -typedef struct VkSemaphoreGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBits handleType; -} VkSemaphoreGetFdInfoKHR; +typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice 
device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( VkDevice device, - const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + const VkBufferDeviceAddressInfo* pInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, - const VkSemaphoreGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif + const VkBufferDeviceAddressInfo* pInfo); +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif -#define VK_KHR_push_descriptor 1 -#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 -#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" -typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t maxPushDescriptors; -} VkPhysicalDevicePushDescriptorPropertiesKHR; -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); +#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites); +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); -VKAPI_ATTR void 
VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( - VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void* pData); +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); #endif -#define VK_KHR_shader_float16_int8 1 -#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 -#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" -typedef struct VkPhysicalDeviceShaderFloat16Int8FeaturesKHR { +#define VK_KHR_pipeline_executable_properties 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" + +typedef enum VkPipelineExecutableStatisticFormatKHR { + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPipelineExecutableStatisticFormatKHR; +typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { VkStructureType sType; void* pNext; - VkBool32 shaderFloat16; - VkBool32 shaderInt8; -} VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; + VkBool32 pipelineExecutableInfo; +} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; -typedef VkPhysicalDeviceShaderFloat16Int8FeaturesKHR VkPhysicalDeviceFloat16Int8FeaturesKHR; +typedef struct VkPipelineInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; +} VkPipelineInfoKHR; +typedef struct VkPipelineExecutablePropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags stages; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t subgroupSize; +} VkPipelineExecutablePropertiesKHR; +typedef struct VkPipelineExecutableInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; + uint32_t executableIndex; +} VkPipelineExecutableInfoKHR; -#define VK_KHR_16bit_storage 1 -#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" -typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; +typedef union VkPipelineExecutableStatisticValueKHR { + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +} VkPipelineExecutableStatisticValueKHR; +typedef struct VkPipelineExecutableStatisticKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkPipelineExecutableStatisticFormatKHR format; + VkPipelineExecutableStatisticValueKHR value; +} VkPipelineExecutableStatisticKHR; +typedef struct VkPipelineExecutableInternalRepresentationKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkBool32 isText; + size_t dataSize; + void* pData; +} VkPipelineExecutableInternalRepresentationKHR; -#define 
VK_KHR_incremental_present 1 -#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 -#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" -typedef struct VkRectLayerKHR { - VkOffset2D offset; - VkExtent2D extent; - uint32_t layer; -} VkRectLayerKHR; +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); -typedef struct VkPresentRegionKHR { - uint32_t rectangleCount; - const VkRectLayerKHR* pRectangles; -} VkPresentRegionKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( + VkDevice device, + const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties); -typedef struct VkPresentRegionsKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentRegionKHR* pRegions; -} VkPresentRegionsKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#endif -#define VK_KHR_descriptor_update_template 1 -typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; +#define VK_KHR_shader_integer_dot_product 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" +typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR; -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" -typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; +typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR; -typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; -typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; -typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( - VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( - VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks* pAllocator); +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( - VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void* pData); -#endif +#define VK_KHR_present_id 1 +#define VK_KHR_PRESENT_ID_SPEC_VERSION 1 +#define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" +typedef struct VkPresentIdKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint64_t* pPresentIds; +} VkPresentIdKHR; -#define VK_KHR_imageless_framebuffer 1 -#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 -#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" -typedef struct VkPhysicalDeviceImagelessFramebufferFeaturesKHR { +typedef struct VkPhysicalDevicePresentIdFeaturesKHR { VkStructureType sType; void* pNext; - VkBool32 imagelessFramebuffer; -} VkPhysicalDeviceImagelessFramebufferFeaturesKHR; + VkBool32 presentId; +} VkPhysicalDevicePresentIdFeaturesKHR; -typedef struct VkFramebufferAttachmentImageInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageCreateFlags flags; - VkImageUsageFlags usage; - uint32_t width; - uint32_t height; - uint32_t layerCount; - uint32_t viewFormatCount; - const VkFormat* pViewFormats; -} VkFramebufferAttachmentImageInfoKHR; -typedef struct VkFramebufferAttachmentsCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t attachmentImageInfoCount; - const VkFramebufferAttachmentImageInfoKHR* pAttachmentImageInfos; -} VkFramebufferAttachmentsCreateInfoKHR; -typedef struct VkRenderPassAttachmentBeginInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t attachmentCount; - const VkImageView* pAttachments; -} VkRenderPassAttachmentBeginInfoKHR; +#define VK_KHR_synchronization2 1 +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR; +typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR; +typedef VkAccessFlags2 VkAccessFlags2KHR; -#define VK_KHR_create_renderpass2 1 -#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 -#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" -typedef struct VkAttachmentDescription2KHR { - VkStructureType sType; - const void* pNext; - VkAttachmentDescriptionFlags flags; - VkFormat format; - VkSampleCountFlagBits samples; - VkAttachmentLoadOp loadOp; - VkAttachmentStoreOp storeOp; - 
VkAttachmentLoadOp stencilLoadOp; - VkAttachmentStoreOp stencilStoreOp; - VkImageLayout initialLayout; - VkImageLayout finalLayout; -} VkAttachmentDescription2KHR; +typedef VkAccessFlagBits2 VkAccessFlagBits2KHR; -typedef struct VkAttachmentReference2KHR { - VkStructureType sType; - const void* pNext; - uint32_t attachment; - VkImageLayout layout; - VkImageAspectFlags aspectMask; -} VkAttachmentReference2KHR; +typedef VkSubmitFlagBits VkSubmitFlagBitsKHR; -typedef struct VkSubpassDescription2KHR { - VkStructureType sType; - const void* pNext; - VkSubpassDescriptionFlags flags; - VkPipelineBindPoint pipelineBindPoint; - uint32_t viewMask; - uint32_t inputAttachmentCount; - const VkAttachmentReference2KHR* pInputAttachments; - uint32_t colorAttachmentCount; - const VkAttachmentReference2KHR* pColorAttachments; - const VkAttachmentReference2KHR* pResolveAttachments; - const VkAttachmentReference2KHR* pDepthStencilAttachment; - uint32_t preserveAttachmentCount; - const uint32_t* pPreserveAttachments; -} VkSubpassDescription2KHR; - -typedef struct VkSubpassDependency2KHR { - VkStructureType sType; - const void* pNext; - uint32_t srcSubpass; - uint32_t dstSubpass; - VkPipelineStageFlags srcStageMask; - VkPipelineStageFlags dstStageMask; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkDependencyFlags dependencyFlags; - int32_t viewOffset; -} VkSubpassDependency2KHR; +typedef VkSubmitFlags VkSubmitFlagsKHR; -typedef struct VkRenderPassCreateInfo2KHR { - VkStructureType sType; - const void* pNext; - VkRenderPassCreateFlags flags; - uint32_t attachmentCount; - const VkAttachmentDescription2KHR* pAttachments; - uint32_t subpassCount; - const VkSubpassDescription2KHR* pSubpasses; - uint32_t dependencyCount; - const VkSubpassDependency2KHR* pDependencies; - uint32_t correlatedViewMaskCount; - const uint32_t* pCorrelatedViewMasks; -} VkRenderPassCreateInfo2KHR; - -typedef struct VkSubpassBeginInfoKHR { - VkStructureType sType; - const void* pNext; - VkSubpassContents contents; -} VkSubpassBeginInfoKHR; +typedef VkMemoryBarrier2 VkMemoryBarrier2KHR; -typedef struct VkSubpassEndInfoKHR { - VkStructureType sType; - const void* pNext; -} VkSubpassEndInfoKHR; +typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR; + +typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR; + +typedef VkDependencyInfo VkDependencyInfoKHR; + +typedef VkSubmitInfo2 VkSubmitInfo2KHR; + +typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR; + +typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; + +typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); -typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo); -typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo); -typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo); +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + 
VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( - VkDevice device, - const VkRenderPassCreateInfo2KHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass); +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo* pRenderPassBegin, - const VkSubpassBeginInfoKHR* pSubpassBeginInfo); + VkEvent event, + VkPipelineStageFlags2 stageMask); -VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfoKHR* pSubpassBeginInfo, - const VkSubpassEndInfoKHR* pSubpassEndInfo); + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); -VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, - const VkSubpassEndInfoKHR* pSubpassEndInfo); -#endif + const VkDependencyInfo* pDependencyInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); -#define VK_KHR_shared_presentable_image 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" -typedef struct VkSharedPresentSurfaceCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkImageUsageFlags sharedPresentSupportedUsageFlags; -} VkSharedPresentSurfaceCapabilitiesKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL 
vkGetSwapchainStatusKHR( - VkDevice device, - VkSwapchainKHR swapchain); +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); #endif -#define VK_KHR_external_fence_capabilities 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" -typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; +#define VK_KHR_shader_subgroup_uniform_control_flow 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" +typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupUniformControlFlow; +} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; -typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; -typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; -typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; +#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; -typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; -typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, - VkExternalFenceProperties* pExternalFenceProperties); -#endif -#define VK_KHR_external_fence 1 -#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" -typedef VkFenceImportFlags VkFenceImportFlagsKHR; +#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR; -typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; +typedef VkCopyImageInfo2 VkCopyImageInfo2KHR; -typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; +typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR; +typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR; +typedef VkBlitImageInfo2 VkBlitImageInfo2KHR; -#define VK_KHR_external_fence_fd 1 
-#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" -typedef struct VkImportFenceFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkFenceImportFlags flags; - VkExternalFenceHandleTypeFlagBits handleType; - int fd; -} VkImportFenceFdInfoKHR; +typedef VkResolveImageInfo2 VkResolveImageInfo2KHR; -typedef struct VkFenceGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBits handleType; -} VkFenceGetFdInfoKHR; +typedef VkBufferCopy2 VkBufferCopy2KHR; -typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkImageCopy2 VkImageCopy2KHR; + +typedef VkImageBlit2 VkImageBlit2KHR; + +typedef VkBufferImageCopy2 VkBufferImageCopy2KHR; + +typedef VkImageResolve2 VkImageResolve2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( - VkDevice device, - const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( - VkDevice device, - const VkFenceGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); -#define VK_KHR_maintenance2 1 -#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2" -typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); -typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); -typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); +#endif -typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; -typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; +#define VK_KHR_format_feature_flags2 1 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 1 
+#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" +typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR; -typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; +typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR; -typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; +typedef VkFormatProperties3 VkFormatProperties3KHR; -#define VK_KHR_get_surface_capabilities2 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" -typedef struct VkPhysicalDeviceSurfaceInfo2KHR { - VkStructureType sType; - const void* pNext; - VkSurfaceKHR surface; -} VkPhysicalDeviceSurfaceInfo2KHR; +#define VK_KHR_maintenance4 1 +#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" +typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR; -typedef struct VkSurfaceCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceCapabilitiesKHR surfaceCapabilities; -} VkSurfaceCapabilities2KHR; +typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR; -typedef struct VkSurfaceFormat2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceFormatKHR surfaceFormat; -} VkSurfaceFormat2KHR; +typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); +typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); -#endif +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -#define VK_KHR_variable_pointers 1 -#define 
VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 -#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" -typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif -typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; -#define 
VK_KHR_get_display_properties2 1 -#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" -typedef struct VkDisplayProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPropertiesKHR displayProperties; -} VkDisplayProperties2KHR; +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); -typedef struct VkDisplayPlaneProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPlanePropertiesKHR displayPlaneProperties; -} VkDisplayPlaneProperties2KHR; +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; -typedef struct VkDisplayModeProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayModePropertiesKHR displayModeProperties; -} VkDisplayModeProperties2KHR; +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); -typedef struct VkDisplayPlaneInfo2KHR { - VkStructureType sType; - const void* pNext; - VkDisplayModeKHR mode; - uint32_t planeIndex; -} VkDisplayPlaneInfo2KHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); -typedef struct VkDisplayPlaneCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPlaneCapabilitiesKHR capabilities; -} VkDisplayPlaneCapabilities2KHR; +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR 
*PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayProperties2KHR* pProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPlaneProperties2KHR* pProperties); +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t* pPropertyCount, - VkDisplayModeProperties2KHR* pProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR* pCapabilities); -#endif +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" -#define VK_KHR_dedicated_allocation 1 -#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 -#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" -typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" -typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; -#define VK_KHR_storage_buffer_storage_class 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" -#define VK_KHR_relaxed_block_layout 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" -#define VK_KHR_get_memory_requirements2 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" -typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define 
VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" -typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; -typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; -typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; -typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( VkDevice device, - const VkImageMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( VkDevice device, - const VkBufferMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( - VkDevice device, - const VkImageSparseMemoryRequirementsInfo2* pInfo, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); #endif -#define 
VK_KHR_image_format_list 1 -#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 -#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" -typedef struct VkImageFormatListCreateInfoKHR { +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" +typedef struct VkDedicatedAllocationImageCreateInfoNV { VkStructureType sType; const void* pNext; - uint32_t viewFormatCount; - const VkFormat* pViewFormats; -} VkImageFormatListCreateInfoKHR; - + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; -#define VK_KHR_sampler_ycbcr_conversion 1 -typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; -#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 -#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" -typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; -typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; -typedef VkChromaLocation VkChromaLocationKHR; +#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; -typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; -typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; -typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); 
+typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); -typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); -typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); -typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); -typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); -typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( - VkDevice device, - const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSamplerYcbcrConversion* pYcbcrConversion); +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); -VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( - VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); #endif -#define VK_KHR_bind_memory2 1 -#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 -#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" -typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) 
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; -typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const char* pName; +} VkCuFunctionCreateInfoNVX; -typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); -typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( VkDevice device, - uint32_t bindInfoCount, - const VkBindBufferMemoryInfo* pBindInfos); + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( VkDevice device, - uint32_t bindInfoCount, - const VkBindImageMemoryInfo* pBindInfos); + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuFunctionNVX* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); #endif -#define VK_KHR_maintenance3 1 -#define VK_KHR_MAINTENANCE3_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE3_EXTENSION_NAME "VK_KHR_maintenance3" -typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; +#define VK_NVX_image_view_handle 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 +#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" +typedef struct VkImageViewHandleInfoNVX { + VkStructureType sType; + const void* pNext; + 
VkImageView imageView; + VkDescriptorType descriptorType; + VkSampler sampler; +} VkImageViewHandleInfoNVX; -typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; -typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); +typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( +VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( VkDevice device, - const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - VkDescriptorSetLayoutSupport* pSupport); + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); #endif -#define VK_KHR_draw_indirect_count 1 -#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 -#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -6190,7 +9943,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( uint32_t maxDrawCount, uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -6201,3663 +9954,4358 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( #endif -#define VK_KHR_8bit_storage 1 -#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" -typedef struct VkPhysicalDevice8BitStorageFeaturesKHR { +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +#define VK_AMD_gpu_shader_half_float 1 +#define 
VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + + +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 storageBuffer8BitAccess; - VkBool32 uniformAndStorageBuffer8BitAccess; - VkBool32 storagePushConstant8; -} VkPhysicalDevice8BitStorageFeaturesKHR; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; -#define VK_KHR_shader_atomic_int64 1 -#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 -#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" -typedef struct VkPhysicalDeviceShaderAtomicInt64FeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 shaderBufferInt64Atomics; - VkBool32 shaderSharedInt64Atomics; -} VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" +#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" -#define VK_KHR_driver_properties 1 -#define VK_MAX_DRIVER_NAME_SIZE_KHR 256 -#define VK_MAX_DRIVER_INFO_SIZE_KHR 256 
-#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 -#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; -typedef enum VkDriverIdKHR { - VK_DRIVER_ID_AMD_PROPRIETARY_KHR = 1, - VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = 2, - VK_DRIVER_ID_MESA_RADV_KHR = 3, - VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = 4, - VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = 5, - VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = 6, - VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = 7, - VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = 8, - VK_DRIVER_ID_ARM_PROPRIETARY_KHR = 9, - VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = 10, - VK_DRIVER_ID_GGP_PROPRIETARY_KHR = 11, - VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = 12, - VK_DRIVER_ID_BEGIN_RANGE_KHR = VK_DRIVER_ID_AMD_PROPRIETARY_KHR, - VK_DRIVER_ID_END_RANGE_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR, - VK_DRIVER_ID_RANGE_SIZE_KHR = (VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR - VK_DRIVER_ID_AMD_PROPRIETARY_KHR + 1), - VK_DRIVER_ID_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDriverIdKHR; -typedef struct VkConformanceVersionKHR { - uint8_t major; - uint8_t minor; - uint8_t subminor; - uint8_t patch; -} VkConformanceVersionKHR; +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; -typedef struct VkPhysicalDeviceDriverPropertiesKHR { - VkStructureType sType; - void* pNext; - VkDriverIdKHR driverID; - char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR]; - char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR]; - VkConformanceVersionKHR conformanceVersion; -} VkPhysicalDeviceDriverPropertiesKHR; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif -#define VK_KHR_shader_float_controls 1 -#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 -#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +#define 
VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; -typedef enum VkShaderFloatControlsIndependenceKHR { - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = 0, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = 1, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = 2, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_BEGIN_RANGE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_END_RANGE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR, - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_RANGE_SIZE_KHR = (VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR + 1), - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkShaderFloatControlsIndependenceKHR; -typedef struct VkPhysicalDeviceFloatControlsPropertiesKHR { - VkStructureType sType; - void* pNext; - VkShaderFloatControlsIndependenceKHR denormBehaviorIndependence; - VkShaderFloatControlsIndependenceKHR roundingModeIndependence; - VkBool32 shaderSignedZeroInfNanPreserveFloat16; - VkBool32 shaderSignedZeroInfNanPreserveFloat32; - VkBool32 shaderSignedZeroInfNanPreserveFloat64; - VkBool32 shaderDenormPreserveFloat16; - VkBool32 shaderDenormPreserveFloat32; - VkBool32 shaderDenormPreserveFloat64; - VkBool32 shaderDenormFlushToZeroFloat16; - VkBool32 shaderDenormFlushToZeroFloat32; - VkBool32 shaderDenormFlushToZeroFloat64; - VkBool32 shaderRoundingModeRTEFloat16; - VkBool32 shaderRoundingModeRTEFloat32; - VkBool32 shaderRoundingModeRTEFloat64; - VkBool32 shaderRoundingModeRTZFloat16; - VkBool32 shaderRoundingModeRTZFloat32; - VkBool32 shaderRoundingModeRTZFloat64; -} VkPhysicalDeviceFloatControlsPropertiesKHR; +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; -#define VK_KHR_depth_stencil_resolve 1 -#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 -#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" -typedef enum VkResolveModeFlagBitsKHR { - VK_RESOLVE_MODE_NONE_KHR = 0, - VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = 0x00000001, - VK_RESOLVE_MODE_AVERAGE_BIT_KHR = 0x00000002, - VK_RESOLVE_MODE_MIN_BIT_KHR = 0x00000004, - VK_RESOLVE_MODE_MAX_BIT_KHR = 0x00000008, - VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkResolveModeFlagBitsKHR; -typedef VkFlags VkResolveModeFlagsKHR; -typedef struct VkSubpassDescriptionDepthStencilResolveKHR { - VkStructureType sType; - const void* pNext; - VkResolveModeFlagBitsKHR depthResolveMode; - VkResolveModeFlagBitsKHR stencilResolveMode; - const VkAttachmentReference2KHR* pDepthStencilResolveAttachment; -} VkSubpassDescriptionDepthStencilResolveKHR; +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} 
VkValidationFlagsEXT; -typedef struct VkPhysicalDeviceDepthStencilResolvePropertiesKHR { - VkStructureType sType; - void* pNext; - VkResolveModeFlagsKHR supportedDepthResolveModes; - VkResolveModeFlagsKHR supportedStencilResolveModes; - VkBool32 independentResolveNone; - VkBool32 independentResolve; -} VkPhysicalDeviceDepthStencilResolvePropertiesKHR; +#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" -#define VK_KHR_swapchain_mutable_format 1 -#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 -#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" -#define VK_KHR_vulkan_memory_model 1 -#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 -#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" -typedef struct VkPhysicalDeviceVulkanMemoryModelFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 vulkanMemoryModel; - VkBool32 vulkanMemoryModelDeviceScope; - VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; -} VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; +#define VK_EXT_texture_compression_astc_hdr 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" +typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; -#define VK_KHR_surface_protected_capabilities 1 -#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" -typedef struct VkSurfaceProtectedCapabilitiesKHR { + +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" +typedef struct VkImageViewASTCDecodeModeEXT { VkStructureType sType; const void* pNext; - VkBool32 supportsProtected; -} VkSurfaceProtectedCapabilitiesKHR; - - + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; -#define VK_KHR_uniform_buffer_standard_layout 1 -#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" -typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR { +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 uniformBufferStandardLayout; -} VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; -#define VK_KHR_pipeline_executable_properties 1 -#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 -#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" +#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" -typedef enum VkPipelineExecutableStatisticFormatKHR { - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, - 
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BEGIN_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_END_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_RANGE_SIZE_KHR = (VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR + 1), - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPipelineExecutableStatisticFormatKHR; -typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 pipelineExecutableInfo; -} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; -typedef struct VkPipelineInfoKHR { +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { VkStructureType sType; - const void* pNext; - VkPipeline pipeline; -} VkPipelineInfoKHR; - -typedef struct VkPipelineExecutablePropertiesKHR { - VkStructureType sType; - void* pNext; - VkShaderStageFlags stages; - char name[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; - uint32_t subgroupSize; -} VkPipelineExecutablePropertiesKHR; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; -typedef struct VkPipelineExecutableInfoKHR { +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { VkStructureType sType; const void* pNext; - VkPipeline pipeline; - uint32_t executableIndex; -} VkPipelineExecutableInfoKHR; - -typedef union VkPipelineExecutableStatisticValueKHR { - VkBool32 b32; - int64_t i64; - uint64_t u64; - double f64; -} VkPipelineExecutableStatisticValueKHR; - -typedef struct VkPipelineExecutableStatisticKHR { - VkStructureType sType; - void* pNext; - char name[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; - VkPipelineExecutableStatisticFormatKHR format; - VkPipelineExecutableStatisticValueKHR value; -} VkPipelineExecutableStatisticKHR; - -typedef struct VkPipelineExecutableInternalRepresentationKHR { - VkStructureType sType; - void* pNext; - char name[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; - VkBool32 isText; - size_t dataSize; - void* pData; -} VkPipelineExecutableInternalRepresentationKHR; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, 
VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( - VkDevice device, - const VkPipelineInfoKHR* pPipelineInfo, - uint32_t* pExecutableCount, - VkPipelineExecutablePropertiesKHR* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( - VkDevice device, - const VkPipelineExecutableInfoKHR* pExecutableInfo, - uint32_t* pStatisticCount, - VkPipelineExecutableStatisticKHR* pStatistics); +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( - VkDevice device, - const VkPipelineExecutableInfoKHR* pExecutableInfo, - uint32_t* pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); #endif -#define VK_EXT_debug_report 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) -#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9 -#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; -typedef enum VkDebugReportObjectTypeEXT { - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, - VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, - VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, - VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, - VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, - VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, - VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, - VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, - VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, - VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, - VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, - VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, - 
VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, - VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), - VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportObjectTypeEXT; +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; -typedef enum VkDebugReportFlagBitsEXT { - VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, - VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, - VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, - VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, - VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, - VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportFlagBitsEXT; -typedef VkFlags VkDebugReportFlagsEXT; -typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage, - void* pUserData); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); -typedef struct VkDebugReportCallbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportFlagsEXT flags; - PFN_vkDebugReportCallbackEXT pfnCallback; - void* pUserData; -} VkDebugReportCallbackCreateInfoEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif -typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); -typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 
+#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( - VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDebugReportCallbackEXT* pCallback); +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( - VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( - VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage); +#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); #endif -#define VK_NV_glsl_shader 1 -#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 -#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; -#define VK_EXT_depth_range_unrestricted 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT 
powerState; +} VkDisplayPowerInfoEXT; -#define VK_IMG_filter_cubic 1 -#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 -#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; -#define VK_AMD_rasterization_order 1 -#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 -#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; -typedef enum VkRasterizationOrderAMD { - VK_RASTERIZATION_ORDER_STRICT_AMD = 0, - VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, - VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, - VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, - VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), - VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF -} VkRasterizationOrderAMD; -typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { - VkStructureType sType; - const void* pNext; - VkRasterizationOrderAMD rasterizationOrder; -} VkPipelineRasterizationStateRasterizationOrderAMD; +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); -#define VK_AMD_shader_trinary_minmax 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif -#define VK_AMD_shader_explicit_vertex_parameter 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" +typedef struct 
VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; -#define VK_EXT_debug_marker 1 -#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 -#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" -typedef struct VkDebugMarkerObjectNameInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - const char* pObjectName; -} VkDebugMarkerObjectNameInfoEXT; +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; -typedef struct VkDebugMarkerObjectTagInfoEXT { +typedef struct VkPresentTimesInfoGOOGLE { VkStructureType sType; const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugMarkerObjectTagInfoEXT; - -typedef struct VkDebugMarkerMarkerInfoEXT { - VkStructureType sType; - const void* pNext; - const char* pMarkerName; - float color[4]; -} VkDebugMarkerMarkerInfoEXT; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( VkDevice device, - const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( VkDevice device, - const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( - VkCommandBuffer commandBuffer); +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -#endif +#define 
VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" -#define VK_AMD_gcn_shader 1 -#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 -#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" +#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME -#define VK_NV_dedicated_allocation 1 -#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" -typedef struct VkDedicatedAllocationImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationImageCreateInfoNV; -typedef struct VkDedicatedAllocationBufferCreateInfoNV { +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationBufferCreateInfoNV; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; -typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkImage image; - VkBuffer buffer; -} VkDedicatedAllocationMemoryAllocateInfoNV; +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" -#define VK_EXT_transform_feedback 1 -#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 -#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" -typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; -typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 transformFeedback; - VkBool32 geometryStreams; -} VkPhysicalDeviceTransformFeedbackFeaturesEXT; +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; -typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +#define VK_EXT_discard_rectangles 1 
+#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { VkStructureType sType; void* pNext; - uint32_t maxTransformFeedbackStreams; - uint32_t maxTransformFeedbackBuffers; - VkDeviceSize maxTransformFeedbackBufferSize; - uint32_t maxTransformFeedbackStreamDataSize; - uint32_t maxTransformFeedbackBufferDataSize; - uint32_t maxTransformFeedbackBufferDataStride; - VkBool32 transformFeedbackQueries; - VkBool32 transformFeedbackStreamsLinesTriangles; - VkBool32 transformFeedbackRasterizationStreamSelect; - VkBool32 transformFeedbackDraw; -} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; -typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationStateStreamCreateFlagsEXT flags; - uint32_t rasterizationStream; -} VkPipelineRasterizationStateStreamCreateInfoEXT; +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; -typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); -typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); -typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets, - const VkDeviceSize* pSizes); + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); +#endif 
-VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( - VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets); -VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( - VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets); +#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +#define VK_EXT_depth_clip_enable 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" +typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipEnable; +} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + +typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; + VkBool32 depthClipEnable; +} VkPipelineRasterizationDepthClipStateCreateInfoEXT; + -VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - VkQueryControlFlags flags, - uint32_t index); -VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - uint32_t index); +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( - VkCommandBuffer commandBuffer, - uint32_t instanceCount, - uint32_t firstInstance, - VkBuffer counterBuffer, - VkDeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride); -#endif +#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 2 +#define 
VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; -#define VK_NVX_image_view_handle 1 -#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 1 -#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" -typedef struct VkImageViewHandleInfoNVX { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkDescriptorType descriptorType; - VkSampler sampler; -} VkImageViewHandleInfoNVX; +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; -typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( VkDevice device, - const VkImageViewHandleInfoNVX* pInfo); + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); #endif -#define VK_AMD_draw_indirect_count 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 -#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -#endif +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) -#define VK_AMD_negative_viewport_height 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" +#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 
0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; -#define VK_AMD_gpu_shader_half_float 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; -#define VK_AMD_shader_ballot 1 -#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 -#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + const VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + const VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + const VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); -#define VK_AMD_texture_gather_bias_lod 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" -typedef struct VkTextureLODGatherFormatPropertiesAMD { +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { VkStructureType sType; - void* pNext; - VkBool32 supportsTextureGatherLODBiasAMD; -} VkTextureLODGatherFormatPropertiesAMD; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR 
*PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); -#define VK_AMD_shader_info 1 -#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 -#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); -typedef enum VkShaderInfoTypeAMD { - VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, - VK_SHADER_INFO_TYPE_BINARY_AMD = 1, - VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, - VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD, - VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, - VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 1), - VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF -} VkShaderInfoTypeAMD; -typedef struct VkShaderResourceUsageAMD { - uint32_t numUsedVgprs; - uint32_t numUsedSgprs; - uint32_t ldsSizePerLocalWorkGroup; - size_t ldsUsageSizeInBytes; - size_t scratchMemUsageInBytes; -} VkShaderResourceUsageAMD; +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); -typedef struct VkShaderStatisticsInfoAMD { - VkShaderStageFlags shaderStageMask; - VkShaderResourceUsageAMD resourceUsage; - uint32_t numPhysicalVgprs; - uint32_t numPhysicalSgprs; - uint32_t numAvailableVgprs; - uint32_t numAvailableSgprs; - uint32_t computeWorkGroupSize[3]; -} VkShaderStatisticsInfoAMD; +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + 
VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( - VkDevice device, - VkPipeline pipeline, - VkShaderStageFlagBits shaderStage, - VkShaderInfoTypeAMD infoType, - size_t* pInfoSize, - void* pInfo); +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); #endif -#define VK_AMD_shader_image_load_store_lod 1 -#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 -#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" +typedef VkSamplerReductionMode VkSamplerReductionModeEXT; +typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; -#define VK_NV_corner_sampled_image 1 -#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 -#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" -typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 cornerSampledImage; -} VkPhysicalDeviceCornerSampledImageFeaturesNV; +typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; -#define VK_IMG_format_pvrtc 1 -#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 -#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" -#define VK_NV_external_memory_capabilities 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" +#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" -typedef enum VkExternalMemoryHandleTypeFlagBitsNV { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsNV; -typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; -typedef enum VkExternalMemoryFeatureFlagBitsNV { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsNV; -typedef VkFlags VkExternalMemoryFeatureFlagsNV; -typedef struct VkExternalImageFormatPropertiesNV { - VkImageFormatProperties imageFormatProperties; - VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsNV 
exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; -} VkExternalImageFormatPropertiesNV; +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); -#endif +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" +typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT; +typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT; -#define VK_NV_external_memory 1 -#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" -typedef struct VkExternalMemoryImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExternalMemoryImageCreateInfoNV; +typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT; -typedef struct VkExportMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExportMemoryAllocateInfoNV; +typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT; -#define VK_EXT_validation_flags 1 -#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" -typedef enum VkValidationCheckEXT { - VK_VALIDATION_CHECK_ALL_EXT = 0, - VK_VALIDATION_CHECK_SHADERS_EXT = 1, - VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, - VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, - VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), - VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCheckEXT; -typedef struct VkValidationFlagsEXT { - VkStructureType sType; - const void* pNext; - uint32_t disabledValidationCheckCount; - const VkValidationCheckEXT* pDisabledValidationChecks; -} VkValidationFlagsEXT; +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + 
const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; -#define VK_EXT_shader_subgroup_ballot 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; -#define VK_EXT_shader_subgroup_vote 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; -#define VK_EXT_texture_compression_astc_hdr 1 -#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 -#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" -typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { VkStructureType sType; - const void* pNext; - VkBool32 textureCompressionASTC_HDR; -} VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" -#define VK_EXT_astc_decode_mode 1 -#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 -#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" -typedef struct VkImageViewASTCDecodeModeEXT { +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + 
VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { VkStructureType sType; - const void* pNext; - VkFormat decodeMode; -} VkImageViewASTCDecodeModeEXT; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; -typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 decodeModeSharedExponent; -} VkPhysicalDeviceASTCDecodeFeaturesEXT; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; -#define VK_EXT_conditional_rendering 1 -#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 -#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" -typedef enum VkConditionalRenderingFlagBitsEXT { - VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, - VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkConditionalRenderingFlagBitsEXT; -typedef VkFlags VkConditionalRenderingFlagsEXT; -typedef struct VkConditionalRenderingBeginInfoEXT { - VkStructureType sType; - const void* pNext; - VkBuffer buffer; - VkDeviceSize offset; - VkConditionalRenderingFlagsEXT flags; -} VkConditionalRenderingBeginInfoEXT; -typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { +#define 
VK_NV_shader_sm_builtins 1 +#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 +#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" +typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { VkStructureType sType; void* pNext; - VkBool32 conditionalRendering; - VkBool32 inheritedConditionalRendering; -} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + uint32_t shaderSMCount; + uint32_t shaderWarpsPerSM; +} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; -typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { +typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { VkStructureType sType; - const void* pNext; - VkBool32 conditionalRenderingEnable; -} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + void* pNext; + VkBool32 shaderSMBuiltins; +} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; -typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); -typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( - VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); -VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( - VkCommandBuffer commandBuffer); -#endif +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; -#define VK_NVX_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) -#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 -#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" - -typedef enum VkIndirectCommandsTokenTypeNVX { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNVX; - -typedef enum VkObjectEntryTypeNVX { - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, - 
VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, - VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, - VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, - VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, - VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, - VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), - VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryTypeNVX; - -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsLayoutUsageFlagBitsNVX; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - -typedef enum VkObjectEntryUsageFlagBitsNVX { - VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, - VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, - VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryUsageFlagBitsNVX; -typedef VkFlags VkObjectEntryUsageFlagsNVX; -typedef struct VkDeviceGeneratedCommandsFeaturesNVX { +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { VkStructureType sType; const void* pNext; - VkBool32 computeBindingPointSupport; -} VkDeviceGeneratedCommandsFeaturesNVX; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; -typedef struct VkDeviceGeneratedCommandsLimitsNVX { +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { VkStructureType sType; const void* pNext; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; -} VkDeviceGeneratedCommandsLimitsNVX; - -typedef struct VkIndirectCommandsTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsTokenNVX; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; -typedef struct VkIndirectCommandsLayoutTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; -} VkIndirectCommandsLayoutTokenNVX; +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; -typedef struct VkIndirectCommandsLayoutCreateInfoNVX { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkIndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNVX* pTokens; -} VkIndirectCommandsLayoutCreateInfoNVX; +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; -typedef struct VkCmdProcessCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - 
VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; - uint32_t maxSequencesCount; - VkCommandBuffer targetCommandBuffer; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkCmdProcessCommandsInfoNVX; +typedef struct VkDrmFormatModifierProperties2EXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags2 drmFormatModifierTilingFeatures; +} VkDrmFormatModifierProperties2EXT; -typedef struct VkCmdReserveSpaceForCommandsInfoNVX { - VkStructureType sType; - const void* pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkCmdReserveSpaceForCommandsInfoNVX; +typedef struct VkDrmFormatModifierPropertiesList2EXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierProperties2EXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesList2EXT; -typedef struct VkObjectTableCreateInfoNVX { - VkStructureType sType; - const void* pNext; - uint32_t objectCount; - const VkObjectEntryTypeNVX* pObjectEntryTypes; - const uint32_t* pObjectEntryCounts; - const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; -} VkObjectTableCreateInfoNVX; - -typedef struct VkObjectTableEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; -} VkObjectTableEntryNVX; - -typedef struct VkObjectTablePipelineEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipeline pipeline; -} VkObjectTablePipelineEntryNVX; - -typedef struct VkObjectTableDescriptorSetEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkDescriptorSet descriptorSet; -} VkObjectTableDescriptorSetEntryNVX; - -typedef struct VkObjectTableVertexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; -} VkObjectTableVertexBufferEntryNVX; - -typedef struct VkObjectTableIndexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; - VkIndexType indexType; -} VkObjectTableIndexBufferEntryNVX; - -typedef struct VkObjectTablePushConstantEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkShaderStageFlags stageFlags; -} VkObjectTablePushConstantEntryNVX; - -typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice 
device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); -typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); -typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); +#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator); +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; +typedef VkFlags VkValidationCacheCreateFlagsEXT; +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* 
pDataSize, void* pData); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( VkDevice device, - const VkObjectTableCreateInfoNVX* pCreateInfo, + const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkObjectTableNVX* pObjectTable); + VkValidationCacheEXT* pValidationCache); -VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( VkDevice device, - VkObjectTableNVX objectTable, + VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX* const* ppObjectTableEntries, - const uint32_t* pObjectIndices); + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); -VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectEntryTypeNVX* pObjectEntryTypes, - const uint32_t* pObjectIndices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, - VkDeviceGeneratedCommandsLimitsNVX* pLimits); + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); #endif -#define VK_NV_clip_space_w_scaling 1 -#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 -#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" -typedef struct VkViewportWScalingNV { - float xcoeff; - float ycoeff; -} VkViewportWScalingNV; +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" +typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; -typedef struct VkPipelineViewportWScalingStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 viewportWScalingEnable; - uint32_t viewportCount; - const VkViewportWScalingNV* pViewportWScalings; -} VkPipelineViewportWScalingStateCreateInfoNV; +typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; + +typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + 
VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( +VKAPI_ATTR void VKAPI_CALL 
vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, - const VkViewportWScalingNV* pViewportWScalings); -#endif - - -#define VK_EXT_direct_mode_display 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" -typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + const VkShadingRatePaletteNV* pShadingRatePalettes); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display); +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); #endif -#define VK_EXT_display_surface_counter 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" - -typedef enum VkSurfaceCounterFlagBitsEXT { - VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, - VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSurfaceCounterFlagBitsEXT; -typedef VkFlags VkSurfaceCounterFlagsEXT; -typedef struct VkSurfaceCapabilities2EXT { - VkStructureType sType; - void* pNext; - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; - VkSurfaceCounterFlagsEXT supportedSurfaceCounters; -} VkSurfaceCapabilities2EXT; - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT* pSurfaceCapabilities); -#endif - +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +#define VK_NV_RAY_TRACING_SPEC_VERSION 3 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; + + +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, + 
VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; -#define VK_EXT_display_control 1 -#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" -typedef enum VkDisplayPowerStateEXT { - VK_DISPLAY_POWER_STATE_OFF_EXT = 0, - VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, - VK_DISPLAY_POWER_STATE_ON_EXT = 2, - VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, - VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, - VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), - VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayPowerStateEXT; +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; -typedef enum VkDeviceEventTypeEXT { - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, - VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), - VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDeviceEventTypeEXT; +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + 
VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; -typedef enum VkDisplayEventTypeEXT { - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, - VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), - VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayEventTypeEXT; -typedef struct VkDisplayPowerInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayPowerStateEXT powerState; -} VkDisplayPowerInfoEXT; +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; -typedef struct VkDeviceEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDeviceEventTypeEXT deviceEvent; -} VkDeviceEventInfoEXT; +typedef struct 
VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; -typedef struct VkDisplayEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayEventTypeEXT displayEvent; -} VkDisplayEventInfoEXT; +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; -typedef struct VkSwapchainCounterCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSurfaceCounterFlagsEXT surfaceCounters; -} VkSwapchainCounterCreateInfoEXT; +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; -typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; +} VkGeometryNV; -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( - VkDevice device, - const VkDeviceEventInfoEXT* pDeviceEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT* pDisplayEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( - VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT 
counter, - uint64_t* pCounterValue); -#endif +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; -#define VK_GOOGLE_display_timing 1 -#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 -#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" -typedef struct VkRefreshCycleDurationGOOGLE { - uint64_t refreshDuration; -} VkRefreshCycleDurationGOOGLE; +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; -typedef struct VkPastPresentationTimingGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; - uint64_t actualPresentTime; - uint64_t earliestPresentTime; - uint64_t presentMargin; -} VkPastPresentationTimingGOOGLE; +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; -typedef struct VkPresentTimeGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; -} VkPresentTimeGOOGLE; +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; -typedef struct VkPresentTimesInfoGOOGLE { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentTimeGOOGLE* pTimes; -} VkPresentTimesInfoGOOGLE; +typedef VkTransformMatrixKHR VkTransformMatrixNV; -typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, 
const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pPresentationTimingCount, - VkPastPresentationTimingGOOGLE* pPresentationTimings); -#endif + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + 
const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); -#define VK_NV_sample_mask_override_coverage 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode); -#define VK_NV_geometry_shader_passthrough 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); -#define VK_NV_viewport_array2 1 -#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); -#define VK_NVX_multiview_per_view_attributes 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" -typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - VkStructureType sType; - void* pNext; - VkBool32 perViewPositionAllComponents; -} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData); +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); +VKAPI_ATTR VkResult VKAPI_CALL 
vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif -#define VK_NV_viewport_swizzle 1 -#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" -typedef enum VkViewportCoordinateSwizzleNV { - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, - VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), - VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF -} VkViewportCoordinateSwizzleNV; -typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; -typedef struct VkViewportSwizzleNV { - VkViewportCoordinateSwizzleNV x; - VkViewportCoordinateSwizzleNV y; - VkViewportCoordinateSwizzleNV z; - VkViewportCoordinateSwizzleNV w; -} VkViewportSwizzleNV; +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; -typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineViewportSwizzleStateCreateFlagsNV flags; - uint32_t viewportCount; - const VkViewportSwizzleNV* pViewportSwizzles; -} VkPipelineViewportSwizzleStateCreateInfoNV; +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; -#define VK_EXT_discard_rectangles 1 -#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 -#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" +#define VK_EXT_filter_cubic 1 +#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 +#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" +typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { + VkStructureType sType; + void* pNext; + VkImageViewType imageViewType; +} VkPhysicalDeviceImageViewImageFormatInfoEXT; -typedef enum VkDiscardRectangleModeEXT { - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, - VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, - VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), - VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDiscardRectangleModeEXT; -typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; -typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { +typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { VkStructureType sType; void* pNext; 
- uint32_t maxDiscardRectangles; -} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + VkBool32 filterCubic; + VkBool32 filterCubicMinmax; +} VkFilterCubicImageViewImageFormatPropertiesEXT; -typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineDiscardRectangleStateCreateFlagsEXT flags; - VkDiscardRectangleModeEXT discardRectangleMode; - uint32_t discardRectangleCount; - const VkRect2D* pDiscardRectangles; -} VkPipelineDiscardRectangleStateCreateInfoEXT; -typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( - VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D* pDiscardRectangles); -#endif +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" -#define VK_EXT_conservative_rasterization 1 -#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 -#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" +typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT; -typedef enum VkConservativeRasterizationModeEXT { - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, - VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, - VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, - VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, - VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, - VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1), - VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkConservativeRasterizationModeEXT; -typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; -typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { - VkStructureType sType; - void* pNext; - float primitiveOverestimationSize; - float maxExtraPrimitiveOverestimationSize; - float extraPrimitiveOverestimationSizeGranularity; - VkBool32 primitiveUnderestimation; - VkBool32 conservativePointAndLineRasterization; - VkBool32 degenerateTrianglesRasterized; - VkBool32 degenerateLinesRasterized; - VkBool32 fullyCoveredFragmentShaderInputVariable; - VkBool32 conservativeRasterizationPostDepthCoverage; -} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; +typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT; -typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; - VkConservativeRasterizationModeEXT conservativeRasterizationMode; - float extraPrimitiveOverestimationSize; -} VkPipelineRasterizationConservativeStateCreateInfoEXT; +#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" +typedef struct 
VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; -#define VK_EXT_depth_clip_enable 1 -#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 -#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" -typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; -typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 depthClipEnable; -} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; -typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; - VkBool32 depthClipEnable; -} VkPipelineRasterizationDepthClipStateCreateInfoEXT; +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif -#define VK_EXT_swapchain_colorspace 1 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif -#define VK_EXT_hdr_metadata 1 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 2 -#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" -typedef struct VkXYColorEXT { - float x; - float y; -} VkXYColorEXT; -typedef struct VkHdrMetadataEXT { +#define VK_AMD_pipeline_compiler_control 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" + +typedef enum VkPipelineCompilerControlFlagBitsAMD { + VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkPipelineCompilerControlFlagBitsAMD; +typedef VkFlags VkPipelineCompilerControlFlagsAMD; +typedef struct VkPipelineCompilerControlCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkPipelineCompilerControlFlagsAMD compilerControlFlags; +} VkPipelineCompilerControlCreateInfoAMD; + + + +#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" + +typedef enum VkTimeDomainEXT { + VK_TIME_DOMAIN_DEVICE_EXT = 0, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 
1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3, + VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF +} VkTimeDomainEXT; +typedef struct VkCalibratedTimestampInfoEXT { VkStructureType sType; const void* pNext; - VkXYColorEXT displayPrimaryRed; - VkXYColorEXT displayPrimaryGreen; - VkXYColorEXT displayPrimaryBlue; - VkXYColorEXT whitePoint; - float maxLuminance; - float minLuminance; - float maxContentLightLevel; - float maxFrameAverageLightLevel; -} VkHdrMetadataEXT; + VkTimeDomainEXT timeDomain; +} VkCalibratedTimestampInfoEXT; -typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainEXT* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR* pSwapchains, - const VkHdrMetadataEXT* pMetadata); + uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); #endif -#define VK_EXT_external_memory_dma_buf 1 -#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 -#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" - +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; -#define VK_EXT_queue_family_foreign 1 -#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 -#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" -#define VK_QUEUE_FAMILY_FOREIGN_EXT (~0U-2) -#define VK_EXT_debug_utils 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) -#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 1 -#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" -typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; -typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" -typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { - VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, - 
VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugUtilsMessageSeverityFlagBitsEXT; -typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; -typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { - VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, - VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, - VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, - VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugUtilsMessageTypeFlagBitsEXT; -typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; -typedef struct VkDebugUtilsObjectNameInfoEXT { - VkStructureType sType; - const void* pNext; - VkObjectType objectType; - uint64_t objectHandle; - const char* pObjectName; -} VkDebugUtilsObjectNameInfoEXT; -typedef struct VkDebugUtilsObjectTagInfoEXT { - VkStructureType sType; - const void* pNext; - VkObjectType objectType; - uint64_t objectHandle; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugUtilsObjectTagInfoEXT; -typedef struct VkDebugUtilsLabelEXT { +#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { VkStructureType sType; - const void* pNext; - const char* pLabelName; - float color[4]; -} VkDebugUtilsLabelEXT; - -typedef struct VkDebugUtilsMessengerCallbackDataEXT { - VkStructureType sType; - const void* pNext; - VkDebugUtilsMessengerCallbackDataFlagsEXT flags; - const char* pMessageIdName; - int32_t messageIdNumber; - const char* pMessage; - uint32_t queueLabelCount; - const VkDebugUtilsLabelEXT* pQueueLabels; - uint32_t cmdBufLabelCount; - const VkDebugUtilsLabelEXT* pCmdBufLabels; - uint32_t objectCount; - const VkDebugUtilsObjectNameInfoEXT* pObjects; -} VkDebugUtilsMessengerCallbackDataEXT; - -typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, - void* pUserData); + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; -typedef struct VkDebugUtilsMessengerCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugUtilsMessengerCreateFlagsEXT flags; - VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; - VkDebugUtilsMessageTypeFlagsEXT messageType; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; - void* pUserData; -} VkDebugUtilsMessengerCreateInfoEXT; +typedef struct VkVertexInputBindingDivisorDescriptionEXT { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionEXT; -typedef VkResult (VKAPI_PTR 
*PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); -typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); -typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); -typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); -typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoEXT; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( - VkDevice device, - const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( - VkDevice device, - const VkDebugUtilsObjectTagInfoEXT* pTagInfo); -VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( - VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo); -VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( - VkQueue queue); +#define VK_EXT_pipeline_creation_feedback 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" +typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT; -VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( - VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer); +typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo); -VKAPI_ATTR VkResult 
VKAPI_CALL vkCreateDebugUtilsMessengerEXT( - VkInstance instance, - const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDebugUtilsMessengerEXT* pMessenger); -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( - VkInstance instance, - VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks* pAllocator); +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" -VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( - VkInstance instance, - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); -#endif +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; -#define VK_EXT_sampler_filter_minmax 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 -#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" -typedef enum VkSamplerReductionModeEXT { - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, - VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, - VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, - VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, - VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, - VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), - VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSamplerReductionModeEXT; -typedef struct VkSamplerReductionModeCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSamplerReductionModeEXT reductionMode; -} VkSamplerReductionModeCreateInfoEXT; -typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { +#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 filterMinmaxSingleComponentFormats; - VkBool32 filterMinmaxImageComponentMapping; -} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; - + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; -#define VK_AMD_gpu_shader_int16 1 -#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 -#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" +typedef struct VkDrawMeshTasksIndirectCommandNV { + 
uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -#define VK_AMD_mixed_attachment_samples 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); -#define VK_AMD_shader_fragment_mask 1 -#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 -#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif -#define VK_EXT_inline_uniform_block 1 -#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 -#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" -typedef struct VkPhysicalDeviceInlineUniformBlockFeaturesEXT { +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 inlineUniformBlock; - VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; -} VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + -typedef struct VkPhysicalDeviceInlineUniformBlockPropertiesEXT { +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { VkStructureType sType; void* pNext; - uint32_t maxInlineUniformBlockSize; - uint32_t maxPerStageDescriptorInlineUniformBlocks; - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; - uint32_t maxDescriptorSetInlineUniformBlocks; - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; -} VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + -typedef struct VkWriteDescriptorSetInlineUniformBlockEXT { + +#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { VkStructureType sType; const void* pNext; - uint32_t dataSize; - const void* pData; -} VkWriteDescriptorSetInlineUniformBlockEXT; + uint32_t exclusiveScissorCount; + const VkRect2D* 
pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; -typedef struct VkDescriptorPoolInlineUniformBlockCreateInfoEXT { +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { VkStructureType sType; - const void* pNext; - uint32_t maxInlineUniformBlockBindings; -} VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif -#define VK_EXT_shader_stencil_export 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; -#define VK_EXT_sample_locations 1 -#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 -#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" -typedef struct VkSampleLocationEXT { - float x; - float y; -} VkSampleLocationEXT; +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; -typedef struct VkSampleLocationsInfoEXT { - VkStructureType sType; - const void* pNext; - VkSampleCountFlagBits sampleLocationsPerPixel; - VkExtent2D sampleLocationGridSize; - uint32_t sampleLocationsCount; - const VkSampleLocationEXT* pSampleLocations; -} VkSampleLocationsInfoEXT; +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); -typedef struct VkAttachmentSampleLocationsEXT { - uint32_t attachmentIndex; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkAttachmentSampleLocationsEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); -typedef struct VkSubpassSampleLocationsEXT { - uint32_t subpassIndex; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkSubpassSampleLocationsEXT; +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); +#endif -typedef struct VkRenderPassSampleLocationsBeginInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t attachmentInitialSampleLocationsCount; - const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; - uint32_t postSubpassSampleLocationsCount; - const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; -} VkRenderPassSampleLocationsBeginInfoEXT; -typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 sampleLocationsEnable; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} 
VkPipelineSampleLocationsStateCreateInfoEXT; +#define VK_INTEL_shader_integer_functions2 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" +typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerFunctions2; +} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + + +#define VK_INTEL_performance_query 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 +#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" + +typedef enum VkPerformanceConfigurationTypeINTEL { + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, + VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceConfigurationTypeINTEL; + +typedef enum VkQueryPoolSamplingModeINTEL { + VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, + VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkQueryPoolSamplingModeINTEL; -typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { - VkStructureType sType; - void* pNext; - VkSampleCountFlags sampleLocationSampleCounts; - VkExtent2D maxSampleLocationGridSize; - float sampleLocationCoordinateRange[2]; - uint32_t sampleLocationSubPixelBits; - VkBool32 variableSampleLocations; -} VkPhysicalDeviceSampleLocationsPropertiesEXT; +typedef enum VkPerformanceOverrideTypeINTEL { + VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, + VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, + VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceOverrideTypeINTEL; -typedef struct VkMultisamplePropertiesEXT { - VkStructureType sType; - void* pNext; - VkExtent2D maxSampleLocationGridSize; -} VkMultisamplePropertiesEXT; +typedef enum VkPerformanceParameterTypeINTEL { + VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, + VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, + VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceParameterTypeINTEL; -typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); +typedef enum VkPerformanceValueTypeINTEL { + VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, + VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, + VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, + VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, + VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, + VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceValueTypeINTEL; +typedef union VkPerformanceValueDataINTEL { + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +} VkPerformanceValueDataINTEL; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( - VkCommandBuffer commandBuffer, - const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef struct VkPerformanceValueINTEL { + VkPerformanceValueTypeINTEL type; + VkPerformanceValueDataINTEL data; +} VkPerformanceValueINTEL; -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( - VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT* 
pMultisampleProperties); -#endif +typedef struct VkInitializePerformanceApiInfoINTEL { + VkStructureType sType; + const void* pNext; + void* pUserData; +} VkInitializePerformanceApiInfoINTEL; +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { + VkStructureType sType; + const void* pNext; + VkQueryPoolSamplingModeINTEL performanceCountersSampling; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; -#define VK_EXT_blend_operation_advanced 1 -#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 -#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; -typedef enum VkBlendOverlapEXT { - VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, - VK_BLEND_OVERLAP_DISJOINT_EXT = 1, - VK_BLEND_OVERLAP_CONJOINT_EXT = 2, - VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, - VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), - VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF -} VkBlendOverlapEXT; -typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { +typedef struct VkPerformanceMarkerInfoINTEL { VkStructureType sType; - void* pNext; - VkBool32 advancedBlendCoherentOperations; -} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + const void* pNext; + uint64_t marker; +} VkPerformanceMarkerInfoINTEL; -typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { +typedef struct VkPerformanceStreamMarkerInfoINTEL { VkStructureType sType; - void* pNext; - uint32_t advancedBlendMaxColorAttachments; - VkBool32 advancedBlendIndependentBlend; - VkBool32 advancedBlendNonPremultipliedSrcColor; - VkBool32 advancedBlendNonPremultipliedDstColor; - VkBool32 advancedBlendCorrelatedOverlap; - VkBool32 advancedBlendAllOperations; -} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + const void* pNext; + uint32_t marker; +} VkPerformanceStreamMarkerInfoINTEL; -typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 srcPremultiplied; - VkBool32 dstPremultiplied; - VkBlendOverlapEXT blendOverlap; -} VkPipelineColorBlendAdvancedStateCreateInfoEXT; +typedef struct VkPerformanceOverrideInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceOverrideTypeINTEL type; + VkBool32 enable; + uint64_t parameter; +} VkPerformanceOverrideInfoINTEL; +typedef struct VkPerformanceConfigurationAcquireInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceConfigurationTypeINTEL type; +} VkPerformanceConfigurationAcquireInfoINTEL; +typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, 
VkPerformanceConfigurationINTEL* pConfiguration); +typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); -#define VK_NV_fragment_coverage_to_color 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" -typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; -typedef struct VkPipelineCoverageToColorStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageToColorStateCreateFlagsNV flags; - VkBool32 coverageToColorEnable; - uint32_t coverageToColorLocation; -} VkPipelineCoverageToColorStateCreateInfoNV; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( + VkDevice device, + const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( + VkDevice device); +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo); -#define VK_NV_framebuffer_mixed_samples 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); -typedef enum VkCoverageModulationModeNV { - VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, - VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, - VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, - VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, - VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, - VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, - VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), - VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoverageModulationModeNV; -typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; -typedef struct VkPipelineCoverageModulationStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageModulationStateCreateFlagsNV flags; - VkCoverageModulationModeNV coverageModulationMode; - VkBool32 coverageModulationTableEnable; - uint32_t coverageModulationTableCount; - const float* pCoverageModulationTable; -} VkPipelineCoverageModulationStateCreateInfoNV; +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( + VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration); +VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( + VkDevice device, + VkPerformanceConfigurationINTEL configuration); -#define VK_NV_fill_rectangle 1 -#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 -#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( + 
VkQueue queue, + VkPerformanceConfigurationINTEL configuration); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( + VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue); +#endif -#define VK_NV_shader_sm_builtins 1 -#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 -#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" -typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { - VkStructureType sType; - void* pNext; - uint32_t shaderSMCount; - uint32_t shaderWarpsPerSM; -} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; -typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 shaderSMBuiltins; -} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + uint32_t pciDomain; + uint32_t pciBus; + uint32_t pciDevice; + uint32_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; -#define VK_EXT_post_depth_coverage 1 -#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 -#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" +#define VK_AMD_display_native_hdr 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" +typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 localDimmingSupport; +} VkDisplayNativeHdrSurfaceCapabilitiesAMD; +typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkBool32 localDimmingEnable; +} VkSwapchainDisplayNativeHdrCreateInfoAMD; -#define VK_EXT_image_drm_format_modifier 1 -#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 1 -#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" -typedef struct VkDrmFormatModifierPropertiesEXT { - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - VkFormatFeatureFlags drmFormatModifierTilingFeatures; -} VkDrmFormatModifierPropertiesEXT; +typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); -typedef struct VkDrmFormatModifierPropertiesListEXT { - VkStructureType sType; - void* pNext; - uint32_t drmFormatModifierCount; - VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; -} VkDrmFormatModifierPropertiesListEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( + VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable); +#endif -typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + +#define VK_EXT_fragment_density_map 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 +#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" +typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { VkStructureType sType; - const void* pNext; - uint64_t drmFormatModifier; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; -} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + void* pNext; + VkBool32 fragmentDensityMap; + VkBool32 fragmentDensityMapDynamic; + VkBool32 fragmentDensityMapNonSubsampledImages; +} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; -typedef struct VkImageDrmFormatModifierListCreateInfoEXT { +typedef struct 
VkPhysicalDeviceFragmentDensityMapPropertiesEXT { VkStructureType sType; - const void* pNext; - uint32_t drmFormatModifierCount; - const uint64_t* pDrmFormatModifiers; -} VkImageDrmFormatModifierListCreateInfoEXT; + void* pNext; + VkExtent2D minFragmentDensityTexelSize; + VkExtent2D maxFragmentDensityTexelSize; + VkBool32 fragmentDensityInvocations; +} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; -typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - const VkSubresourceLayout* pPlaneLayouts; -} VkImageDrmFormatModifierExplicitCreateInfoEXT; +typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkAttachmentReference fragmentDensityMapAttachment; +} VkRenderPassFragmentDensityMapCreateInfoEXT; -typedef struct VkImageDrmFormatModifierPropertiesEXT { - VkStructureType sType; - void* pNext; - uint64_t drmFormatModifier; -} VkImageDrmFormatModifierPropertiesEXT; -typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( - VkDevice device, - VkImage image, - VkImageDrmFormatModifierPropertiesEXT* pProperties); -#endif +#define VK_EXT_scalar_block_layout 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" +typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; -#define VK_EXT_validation_cache 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) -#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" -typedef enum VkValidationCacheHeaderVersionEXT { - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, - VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, - VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, - VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1), - VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCacheHeaderVersionEXT; -typedef VkFlags VkValidationCacheCreateFlagsEXT; -typedef struct VkValidationCacheCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkValidationCacheCreateFlagsEXT flags; - size_t initialDataSize; - const void* pInitialData; -} VkValidationCacheCreateInfoEXT; +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME -typedef struct VkShaderModuleValidationCacheCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkValidationCacheEXT validationCache; -} VkShaderModuleValidationCacheCreateInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); -typedef void (VKAPI_PTR 
*PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); -typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( - VkDevice device, - const VkValidationCacheCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkValidationCacheEXT* pValidationCache); -VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( - VkDevice device, - VkValidationCacheEXT validationCache, - const VkAllocationCallbacks* pAllocator); +#define VK_EXT_subgroup_size_control 1 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" +typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( - VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT* pSrcCaches); +typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( - VkDevice device, - VkValidationCacheEXT validationCache, - size_t* pDataSize, - void* pData); -#endif +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; -#define VK_EXT_descriptor_indexing 1 -#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 -#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" -typedef enum VkDescriptorBindingFlagBitsEXT { - VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = 0x00000001, - VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = 0x00000002, - VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = 0x00000004, - VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = 0x00000008, - VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDescriptorBindingFlagBitsEXT; -typedef VkFlags VkDescriptorBindingFlagsEXT; -typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t bindingCount; - const VkDescriptorBindingFlagsEXT* pBindingFlags; -} VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; +#define VK_AMD_shader_core_properties2 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" + +typedef enum VkShaderCorePropertiesFlagBitsAMD { + VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderCorePropertiesFlagBitsAMD; +typedef VkFlags VkShaderCorePropertiesFlagsAMD; +typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { + VkStructureType sType; + void* pNext; + VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; + uint32_t activeComputeUnitCount; +} VkPhysicalDeviceShaderCoreProperties2AMD; -typedef struct VkPhysicalDeviceDescriptorIndexingFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 shaderInputAttachmentArrayDynamicIndexing; - VkBool32 
shaderUniformTexelBufferArrayDynamicIndexing; - VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; - VkBool32 shaderUniformBufferArrayNonUniformIndexing; - VkBool32 shaderSampledImageArrayNonUniformIndexing; - VkBool32 shaderStorageBufferArrayNonUniformIndexing; - VkBool32 shaderStorageImageArrayNonUniformIndexing; - VkBool32 shaderInputAttachmentArrayNonUniformIndexing; - VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; - VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; - VkBool32 descriptorBindingUniformBufferUpdateAfterBind; - VkBool32 descriptorBindingSampledImageUpdateAfterBind; - VkBool32 descriptorBindingStorageImageUpdateAfterBind; - VkBool32 descriptorBindingStorageBufferUpdateAfterBind; - VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; - VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; - VkBool32 descriptorBindingUpdateUnusedWhilePending; - VkBool32 descriptorBindingPartiallyBound; - VkBool32 descriptorBindingVariableDescriptorCount; - VkBool32 runtimeDescriptorArray; -} VkPhysicalDeviceDescriptorIndexingFeaturesEXT; -typedef struct VkPhysicalDeviceDescriptorIndexingPropertiesEXT { + +#define VK_AMD_device_coherent_memory 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" +typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { VkStructureType sType; void* pNext; - uint32_t maxUpdateAfterBindDescriptorsInAllPools; - VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; - VkBool32 shaderSampledImageArrayNonUniformIndexingNative; - VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; - VkBool32 shaderStorageImageArrayNonUniformIndexingNative; - VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; - VkBool32 robustBufferAccessUpdateAfterBind; - VkBool32 quadDivergentImplicitLod; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; - uint32_t maxPerStageUpdateAfterBindResources; - uint32_t maxDescriptorSetUpdateAfterBindSamplers; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; -} VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + VkBool32 deviceCoherentMemory; +} VkPhysicalDeviceCoherentMemoryFeaturesAMD; -typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t descriptorSetCount; - const uint32_t* pDescriptorCounts; -} VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; -typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupportEXT { + +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { VkStructureType sType; void* pNext; - uint32_t maxVariableDescriptorCount; 
-} VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; -#define VK_EXT_shader_viewport_index_layer 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" +#define VK_EXT_memory_budget 1 +#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 +#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" +typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; + VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryBudgetPropertiesEXT; -#define VK_NV_shading_rate_image 1 -#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 -#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" -typedef enum VkShadingRatePaletteEntryNV { - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, - VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, - VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, - VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, - VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, - VK_SHADING_RATE_PALETTE_ENTRY_BEGIN_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, - VK_SHADING_RATE_PALETTE_ENTRY_END_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, - VK_SHADING_RATE_PALETTE_ENTRY_RANGE_SIZE_NV = (VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV + 1), - VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF -} VkShadingRatePaletteEntryNV; +#define VK_EXT_memory_priority 1 +#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 +#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" +typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryPriority; +} VkPhysicalDeviceMemoryPriorityFeaturesEXT; -typedef enum VkCoarseSampleOrderTypeNV { - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, - VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, - VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, - VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, - VK_COARSE_SAMPLE_ORDER_TYPE_BEGIN_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, - VK_COARSE_SAMPLE_ORDER_TYPE_END_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV, - VK_COARSE_SAMPLE_ORDER_TYPE_RANGE_SIZE_NV = (VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV + 1), - VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoarseSampleOrderTypeNV; -typedef struct VkShadingRatePaletteNV { - uint32_t shadingRatePaletteEntryCount; - const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; -} VkShadingRatePaletteNV; +typedef struct VkMemoryPriorityAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + float priority; +} VkMemoryPriorityAllocateInfoEXT; -typedef struct 
VkPipelineViewportShadingRateImageStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 shadingRateImageEnable; - uint32_t viewportCount; - const VkShadingRatePaletteNV* pShadingRatePalettes; -} VkPipelineViewportShadingRateImageStateCreateInfoNV; -typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + +#define VK_NV_dedicated_allocation_image_aliasing 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" +typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 shadingRateImage; - VkBool32 shadingRateCoarseSampleOrder; -} VkPhysicalDeviceShadingRateImageFeaturesNV; + VkBool32 dedicatedAllocationImageAliasing; +} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; -typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + + +#define VK_EXT_buffer_device_address 1 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" +typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { VkStructureType sType; void* pNext; - VkExtent2D shadingRateTexelSize; - uint32_t shadingRatePaletteSize; - uint32_t shadingRateMaxCoarseSamples; -} VkPhysicalDeviceShadingRateImagePropertiesNV; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; -typedef struct VkCoarseSampleLocationNV { - uint32_t pixelX; - uint32_t pixelY; - uint32_t sample; -} VkCoarseSampleLocationNV; +typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; -typedef struct VkCoarseSampleOrderCustomNV { - VkShadingRatePaletteEntryNV shadingRate; - uint32_t sampleCount; - uint32_t sampleLocationCount; - const VkCoarseSampleLocationNV* pSampleLocations; -} VkCoarseSampleOrderCustomNV; +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; -typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkCoarseSampleOrderTypeNV sampleOrderType; - uint32_t customSampleOrderCount; - const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; -} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; +typedef struct VkBufferDeviceAddressCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; +} VkBufferDeviceAddressCreateInfoEXT; -typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); -typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( - VkCommandBuffer commandBuffer, - VkImageView imageView, - VkImageLayout imageLayout); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( - VkCommandBuffer commandBuffer, - uint32_t 
firstViewport, - uint32_t viewportCount, - const VkShadingRatePaletteNV* pShadingRatePalettes); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( - VkCommandBuffer commandBuffer, - VkCoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); #endif -#define VK_NV_ray_tracing 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) -#define VK_NV_RAY_TRACING_SPEC_VERSION 3 -#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" -#define VK_SHADER_UNUSED_NV (~0U) - -typedef enum VkAccelerationStructureTypeNV { - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = 0, - VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 1, - VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, - VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV, - VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV + 1), - VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureTypeNV; - -typedef enum VkRayTracingShaderGroupTypeNV { - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = 0, - VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = 1, - VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = 2, - VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, - VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV, - VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_NV = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV + 1), - VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkRayTracingShaderGroupTypeNV; - -typedef enum VkGeometryTypeNV { - VK_GEOMETRY_TYPE_TRIANGLES_NV = 0, - VK_GEOMETRY_TYPE_AABBS_NV = 1, - VK_GEOMETRY_TYPE_BEGIN_RANGE_NV = VK_GEOMETRY_TYPE_TRIANGLES_NV, - VK_GEOMETRY_TYPE_END_RANGE_NV = VK_GEOMETRY_TYPE_AABBS_NV, - VK_GEOMETRY_TYPE_RANGE_SIZE_NV = (VK_GEOMETRY_TYPE_AABBS_NV - VK_GEOMETRY_TYPE_TRIANGLES_NV + 1), - VK_GEOMETRY_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryTypeNV; - -typedef enum VkCopyAccelerationStructureModeNV { - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = 0, - VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 1, - VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, - VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV, - VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_NV = (VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV + 1), - VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCopyAccelerationStructureModeNV; +#define VK_EXT_tooling_info 1 +#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 +#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" +typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT; -typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_NV = 
VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV + 1), - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureMemoryRequirementsTypeNV; +typedef VkToolPurposeFlags VkToolPurposeFlagsEXT; -typedef enum VkGeometryFlagBitsNV { - VK_GEOMETRY_OPAQUE_BIT_NV = 0x00000001, - VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = 0x00000002, - VK_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryFlagBitsNV; -typedef VkFlags VkGeometryFlagsNV; - -typedef enum VkGeometryInstanceFlagBitsNV { - VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = 0x00000001, - VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = 0x00000002, - VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = 0x00000004, - VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = 0x00000008, - VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkGeometryInstanceFlagBitsNV; -typedef VkFlags VkGeometryInstanceFlagsNV; - -typedef enum VkBuildAccelerationStructureFlagBitsNV { - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = 0x00000001, - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 0x00000002, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 0x00000004, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = 0x00000008, - VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = 0x00000010, - VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkBuildAccelerationStructureFlagBitsNV; -typedef VkFlags VkBuildAccelerationStructureFlagsNV; -typedef struct VkRayTracingShaderGroupCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkRayTracingShaderGroupTypeNV type; - uint32_t generalShader; - uint32_t closestHitShader; - uint32_t anyHitShader; - uint32_t intersectionShader; -} VkRayTracingShaderGroupCreateInfoNV; +typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT; -typedef struct VkRayTracingPipelineCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo* pStages; - uint32_t groupCount; - const VkRayTracingShaderGroupCreateInfoNV* pGroups; - uint32_t maxRecursionDepth; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkRayTracingPipelineCreateInfoNV; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); -typedef struct VkGeometryTrianglesNV { - VkStructureType sType; - const void* pNext; - VkBuffer vertexData; - VkDeviceSize vertexOffset; - uint32_t vertexCount; - VkDeviceSize vertexStride; - VkFormat vertexFormat; - VkBuffer indexData; - VkDeviceSize indexOffset; - uint32_t indexCount; - VkIndexType indexType; - VkBuffer transformData; - VkDeviceSize transformOffset; -} VkGeometryTrianglesNV; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); +#endif -typedef struct VkGeometryAABBNV { - VkStructureType sType; - const void* pNext; - VkBuffer aabbData; - 
uint32_t numAABBs; - uint32_t stride; - VkDeviceSize offset; -} VkGeometryAABBNV; -typedef struct VkGeometryDataNV { - VkGeometryTrianglesNV triangles; - VkGeometryAABBNV aabbs; -} VkGeometryDataNV; +#define VK_EXT_separate_stencil_usage 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" +typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; -typedef struct VkGeometryNV { - VkStructureType sType; - const void* pNext; - VkGeometryTypeNV geometryType; - VkGeometryDataNV geometry; - VkGeometryFlagsNV flags; -} VkGeometryNV; -typedef struct VkAccelerationStructureInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureTypeNV type; - VkBuildAccelerationStructureFlagsNV flags; - uint32_t instanceCount; - uint32_t geometryCount; - const VkGeometryNV* pGeometries; -} VkAccelerationStructureInfoNV; -typedef struct VkAccelerationStructureCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkDeviceSize compactedSize; - VkAccelerationStructureInfoNV info; -} VkAccelerationStructureCreateInfoNV; +#define VK_EXT_validation_features 1 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5 +#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" -typedef struct VkBindAccelerationStructureMemoryInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureNV accelerationStructure; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindAccelerationStructureMemoryInfoNV; +typedef enum VkValidationFeatureEnableEXT { + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, + VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, + VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureEnableEXT; -typedef struct VkWriteDescriptorSetAccelerationStructureNV { - VkStructureType sType; - const void* pNext; - uint32_t accelerationStructureCount; - const VkAccelerationStructureNV* pAccelerationStructures; -} VkWriteDescriptorSetAccelerationStructureNV; +typedef enum VkValidationFeatureDisableEXT { + VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, + VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, + VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, + VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, + VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, + VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, + VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, + VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureDisableEXT; +typedef struct VkValidationFeaturesEXT { + VkStructureType sType; + const void* pNext; + uint32_t enabledValidationFeatureCount; + const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; + uint32_t disabledValidationFeatureCount; + const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; +} VkValidationFeaturesEXT; -typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureMemoryRequirementsTypeNV type; - VkAccelerationStructureNV accelerationStructure; -} VkAccelerationStructureMemoryRequirementsInfoNV; -typedef struct VkPhysicalDeviceRayTracingPropertiesNV 
{ - VkStructureType sType; - void* pNext; - uint32_t shaderGroupHandleSize; - uint32_t maxRecursionDepth; - uint32_t maxShaderGroupStride; - uint32_t shaderGroupBaseAlignment; - uint64_t maxGeometryCount; - uint64_t maxInstanceCount; - uint64_t maxTriangleCount; - uint32_t maxDescriptorSetAccelerationStructures; -} VkPhysicalDeviceRayTracingPropertiesNV; -typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); -typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode); -typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); -typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); -typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); +#define VK_NV_cooperative_matrix 1 +#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( - VkDevice device, - const VkAccelerationStructureCreateInfoNV* pCreateInfo, - const 
VkAllocationCallbacks* pAllocator, - VkAccelerationStructureNV* pAccelerationStructure); +typedef enum VkComponentTypeNV { + VK_COMPONENT_TYPE_FLOAT16_NV = 0, + VK_COMPONENT_TYPE_FLOAT32_NV = 1, + VK_COMPONENT_TYPE_FLOAT64_NV = 2, + VK_COMPONENT_TYPE_SINT8_NV = 3, + VK_COMPONENT_TYPE_SINT16_NV = 4, + VK_COMPONENT_TYPE_SINT32_NV = 5, + VK_COMPONENT_TYPE_SINT64_NV = 6, + VK_COMPONENT_TYPE_UINT8_NV = 7, + VK_COMPONENT_TYPE_UINT16_NV = 8, + VK_COMPONENT_TYPE_UINT32_NV = 9, + VK_COMPONENT_TYPE_UINT64_NV = 10, + VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkComponentTypeNV; -VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( - VkDevice device, - VkAccelerationStructureNV accelerationStructure, - const VkAllocationCallbacks* pAllocator); +typedef enum VkScopeNV { + VK_SCOPE_DEVICE_NV = 1, + VK_SCOPE_WORKGROUP_NV = 2, + VK_SCOPE_SUBGROUP_NV = 3, + VK_SCOPE_QUEUE_FAMILY_NV = 5, + VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkScopeNV; +typedef struct VkCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeNV AType; + VkComponentTypeNV BType; + VkComponentTypeNV CType; + VkComponentTypeNV DType; + VkScopeNV scope; +} VkCooperativeMatrixPropertiesNV; -VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( - VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, - VkMemoryRequirements2KHR* pMemoryRequirements); +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesNV; -VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( - VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); -VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( - VkCommandBuffer commandBuffer, - const VkAccelerationStructureInfoNV* pInfo, - VkBuffer instanceData, - VkDeviceSize instanceOffset, - VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkBuffer scratch, - VkDeviceSize scratchOffset); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( - VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeNV mode); -VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( - VkCommandBuffer commandBuffer, - VkBuffer raygenShaderBindingTableBuffer, - VkDeviceSize raygenShaderBindingOffset, - VkBuffer missShaderBindingTableBuffer, - VkDeviceSize missShaderBindingOffset, - VkDeviceSize missShaderBindingStride, - VkBuffer hitShaderBindingTableBuffer, - VkDeviceSize hitShaderBindingOffset, - VkDeviceSize hitShaderBindingStride, - VkBuffer callableShaderBindingTableBuffer, - VkDeviceSize callableShaderBindingOffset, - VkDeviceSize 
callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth); +#define VK_NV_coverage_reduction_mode 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoNV* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); +typedef enum VkCoverageReductionModeNV { + VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, + VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, + VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageReductionModeNV; +typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; +typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 coverageReductionMode; +} VkPhysicalDeviceCoverageReductionModeFeaturesNV; -VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( - VkDevice device, - VkPipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void* pData); +typedef struct VkPipelineCoverageReductionStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageReductionStateCreateFlagsNV flags; + VkCoverageReductionModeNV coverageReductionMode; +} VkPipelineCoverageReductionStateCreateInfoNV; -VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( - VkDevice device, - VkAccelerationStructureNV accelerationStructure, - size_t dataSize, - void* pData); +typedef struct VkFramebufferMixedSamplesCombinationNV { + VkStructureType sType; + void* pNext; + VkCoverageReductionModeNV coverageReductionMode; + VkSampleCountFlagBits rasterizationSamples; + VkSampleCountFlags depthStencilSamples; + VkSampleCountFlags colorSamples; +} VkFramebufferMixedSamplesCombinationNV; -VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( - VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureNV* pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); -VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( - VkDevice device, - VkPipeline pipeline, - uint32_t shader); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations); #endif -#define VK_NV_representative_fragment_test 1 -#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 -#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" -typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { +#define VK_EXT_fragment_shader_interlock 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" +typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 representativeFragmentTest; -} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + VkBool32 fragmentShaderSampleInterlock; + VkBool32 
fragmentShaderPixelInterlock; + VkBool32 fragmentShaderShadingRateInterlock; +} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; -typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + + +#define VK_EXT_ycbcr_image_arrays 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" +typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { VkStructureType sType; - const void* pNext; - VkBool32 representativeFragmentTestEnable; -} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + void* pNext; + VkBool32 ycbcrImageArrays; +} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; -#define VK_EXT_filter_cubic 1 -#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 2 -#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" -typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { +#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { VkStructureType sType; void* pNext; - VkImageViewType imageViewType; -} VkPhysicalDeviceImageViewImageFormatInfoEXT; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; -typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 filterCubic; - VkBool32 filterCubicMinmax ; -} VkFilterCubicImageViewImageFormatPropertiesEXT; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; -#define VK_EXT_global_priority 1 -#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 -#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" +#define VK_EXT_headless_surface 1 +#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 +#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" +typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; +typedef struct VkHeadlessSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkHeadlessSurfaceCreateFlagsEXT flags; +} VkHeadlessSurfaceCreateInfoEXT; -typedef enum VkQueueGlobalPriorityEXT { - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = 128, - VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256, - VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512, - VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024, - VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, - VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, - VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1), - VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF -} VkQueueGlobalPriorityEXT; -typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkQueueGlobalPriorityEXT globalPriority; -} VkDeviceQueueGlobalPriorityCreateInfoEXT; +typedef VkResult 
(VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( + VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif -#define VK_EXT_external_memory_host 1 -#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 -#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" -typedef struct VkImportMemoryHostPointerInfoEXT { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBits handleType; - void* pHostPointer; -} VkImportMemoryHostPointerInfoEXT; +#define VK_EXT_line_rasterization 1 +#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" -typedef struct VkMemoryHostPointerPropertiesEXT { +typedef enum VkLineRasterizationModeEXT { + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLineRasterizationModeEXT; +typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT { VkStructureType sType; void* pNext; - uint32_t memoryTypeBits; -} VkMemoryHostPointerPropertiesEXT; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeaturesEXT; -typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { +typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT { VkStructureType sType; void* pNext; - VkDeviceSize minImportedHostPointerAlignment; -} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationPropertiesEXT; -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +typedef struct VkPipelineRasterizationLineStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkLineRasterizationModeEXT lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 
shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + +#define VK_EXT_host_query_reset 1 +#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 +#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" +typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - const void* pHostPointer, - VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); #endif -#define VK_AMD_buffer_marker 1 -#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 -#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" -typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +#define VK_EXT_index_type_uint8 1 +#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" +typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + + +#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker); -#endif + VkCullModeFlags cullMode); +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); -#define VK_AMD_pipeline_compiler_control 1 -#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 -#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); -typedef enum VkPipelineCompilerControlFlagBitsAMD { - VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF -} VkPipelineCompilerControlFlagBitsAMD; -typedef VkFlags VkPipelineCompilerControlFlagsAMD; -typedef struct VkPipelineCompilerControlCreateInfoAMD { - VkStructureType sType; - const void* pNext; - VkPipelineCompilerControlFlagsAMD compilerControlFlags; -} VkPipelineCompilerControlCreateInfoAMD; +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); -#define VK_EXT_calibrated_timestamps 1 -#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 -#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); -typedef enum VkTimeDomainEXT { - VK_TIME_DOMAIN_DEVICE_EXT = 0, - VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1, - VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2, - VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3, - VK_TIME_DOMAIN_BEGIN_RANGE_EXT = VK_TIME_DOMAIN_DEVICE_EXT, - VK_TIME_DOMAIN_END_RANGE_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT, - VK_TIME_DOMAIN_RANGE_SIZE_EXT = (VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - VK_TIME_DOMAIN_DEVICE_EXT + 1), - VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF -} VkTimeDomainEXT; -typedef struct VkCalibratedTimestampInfoEXT { - VkStructureType sType; - const void* pNext; - VkTimeDomainEXT timeDomain; -} VkCalibratedTimestampInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains); -typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL 
vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( - VkPhysicalDevice physicalDevice, - uint32_t* pTimeDomainCount, - VkTimeDomainEXT* pTimeDomains); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); -VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( - VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoEXT* pTimestampInfos, - uint64_t* pTimestamps, - uint64_t* pMaxDeviation); +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); #endif -#define VK_AMD_shader_core_properties 1 -#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 -#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" -typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { +#define VK_EXT_shader_atomic_float2 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" +typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { VkStructureType sType; void* pNext; - uint32_t shaderEngineCount; - uint32_t shaderArraysPerEngineCount; - uint32_t computeUnitsPerShaderArray; - uint32_t simdPerComputeUnit; - uint32_t wavefrontsPerSimd; - uint32_t wavefrontSize; - uint32_t sgprsPerSimd; - uint32_t minSgprAllocation; - uint32_t maxSgprAllocation; - uint32_t sgprAllocationGranularity; - uint32_t vgprsPerSimd; - uint32_t minVgprAllocation; - uint32_t maxVgprAllocation; - uint32_t vgprAllocationGranularity; -} VkPhysicalDeviceShaderCorePropertiesAMD; + VkBool32 shaderBufferFloat16Atomics; + VkBool32 shaderBufferFloat16AtomicAdd; + VkBool32 shaderBufferFloat16AtomicMinMax; + VkBool32 shaderBufferFloat32AtomicMinMax; + VkBool32 shaderBufferFloat64AtomicMinMax; + VkBool32 shaderSharedFloat16Atomics; + VkBool32 shaderSharedFloat16AtomicAdd; + VkBool32 shaderSharedFloat16AtomicMinMax; + VkBool32 shaderSharedFloat32AtomicMinMax; + VkBool32 shaderSharedFloat64AtomicMinMax; + VkBool32 shaderImageFloat32AtomicMinMax; + VkBool32 sparseImageFloat32AtomicMinMax; +} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; -#define VK_AMD_memory_overallocation_behavior 1 -#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 -#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" +#define VK_EXT_shader_demote_to_helper_invocation 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" +typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + + + +#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; -typedef enum VkMemoryOverallocationBehaviorAMD { - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_BEGIN_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_END_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_RANGE_SIZE_AMD = (VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD + 1), - VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF -} VkMemoryOverallocationBehaviorAMD; -typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + +typedef struct 
VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { VkStructureType sType; const void* pNext; - VkMemoryOverallocationBehaviorAMD overallocationBehavior; -} VkDeviceMemoryOverallocationCreateInfoAMD; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -#define VK_EXT_vertex_attribute_divisor 1 -#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 -#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" -typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL 
vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { VkStructureType sType; void* pNext; - uint32_t maxVertexAttribDivisor; -} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; -typedef struct VkVertexInputBindingDivisorDescriptionEXT { - uint32_t binding; - uint32_t divisor; -} VkVertexInputBindingDivisorDescriptionEXT; +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; -typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t vertexBindingDivisorCount; - const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; -} VkPipelineVertexInputDivisorStateCreateInfoEXT; -typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT { + +#define VK_EXT_texel_buffer_alignment 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" +typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 vertexAttributeInstanceRateDivisor; - VkBool32 vertexAttributeInstanceRateZeroDivisor; -} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + VkBool32 texelBufferAlignment; +} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; +typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; -#define VK_EXT_pipeline_creation_feedback 1 -#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 -#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" -typedef enum VkPipelineCreationFeedbackFlagBitsEXT { - VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = 0x00000001, - VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = 0x00000002, - VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = 0x00000004, - VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} 
VkPipelineCreationFeedbackFlagBitsEXT; -typedef VkFlags VkPipelineCreationFeedbackFlagsEXT; -typedef struct VkPipelineCreationFeedbackEXT { - VkPipelineCreationFeedbackFlagsEXT flags; - uint64_t duration; -} VkPipelineCreationFeedbackEXT; - -typedef struct VkPipelineCreationFeedbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineCreationFeedbackEXT* pPipelineCreationFeedback; - uint32_t pipelineStageCreationFeedbackCount; - VkPipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks; -} VkPipelineCreationFeedbackCreateInfoEXT; +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; -#define VK_NV_shader_subgroup_partitioned 1 -#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 -#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" -#define VK_NV_compute_shader_derivatives 1 -#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 -#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" -typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 computeDerivativeGroupQuads; - VkBool32 computeDerivativeGroupLinear; -} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; - + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); -#define VK_NV_mesh_shader 1 -#define VK_NV_MESH_SHADER_SPEC_VERSION 1 -#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" -typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 taskShader; - VkBool32 meshShader; -} VkPhysicalDeviceMeshShaderFeaturesNV; +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT 
pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; -typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { - VkStructureType sType; - void* pNext; - uint32_t maxDrawMeshTasksCount; - uint32_t maxTaskWorkGroupInvocations; - uint32_t maxTaskWorkGroupSize[3]; - uint32_t maxTaskTotalMemorySize; - uint32_t maxTaskOutputCount; - uint32_t maxMeshWorkGroupInvocations; - uint32_t maxMeshWorkGroupSize[3]; - uint32_t maxMeshTotalMemorySize; - uint32_t maxMeshOutputVertices; - uint32_t maxMeshOutputPrimitives; - uint32_t maxMeshMultiviewViewCount; - uint32_t meshOutputPerVertexGranularity; - uint32_t meshOutputPerPrimitiveGranularity; -} VkPhysicalDeviceMeshShaderPropertiesNV; -typedef struct VkDrawMeshTasksIndirectCommandNV { - uint32_t taskCount; - uint32_t firstTask; -} VkDrawMeshTasksIndirectCommandNV; -typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); -typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +#define VK_EXT_acquire_drm_display 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( - VkCommandBuffer commandBuffer, - uint32_t taskCount, - uint32_t firstTask); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + VkDisplayKHR display); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); +VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR* display); #endif -#define VK_NV_fragment_shader_barycentric 1 -#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" -typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV { +#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 fragmentShaderBarycentric; -} VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; - - + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; -#define VK_NV_shader_image_footprint 1 -#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 -#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME 
"VK_NV_shader_image_footprint" -typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 imageFootprint; -} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; -#define VK_NV_scissor_exclusive 1 -#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1 -#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" -typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { +#define VK_EXT_custom_border_color 1 +#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { VkStructureType sType; - const void* pNext; - uint32_t exclusiveScissorCount; - const VkRect2D* pExclusiveScissors; -} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; -typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 exclusiveScissor; -} VkPhysicalDeviceExclusiveScissorFeaturesNV; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; -typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( - VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkRect2D* pExclusiveScissors); -#endif +#define VK_GOOGLE_user_type 1 +#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 +#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" -#define VK_NV_device_diagnostic_checkpoints 1 -#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 -#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" -typedef struct VkQueueFamilyCheckpointPropertiesNV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags checkpointExecutionStageMask; -} VkQueueFamilyCheckpointPropertiesNV; -typedef struct VkCheckpointDataNV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlagBits stage; - void* pCheckpointMarker; -} VkCheckpointDataNV; +#define VK_EXT_private_data 1 +typedef VkPrivateDataSlot VkPrivateDataSlotEXT; -typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); -typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" +typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT; + +typedef VkPrivateDataSlotCreateFlagBits VkPrivateDataSlotCreateFlagBitsEXT; + +typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef 
VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT; + +typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( - VkCommandBuffer commandBuffer, - const void* pCheckpointMarker); +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); -VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( - VkQueue queue, - uint32_t* pCheckpointDataCount, - VkCheckpointDataNV* pCheckpointData); +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); #endif -#define VK_INTEL_shader_integer_functions2 1 -#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 -#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" -typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 shaderIntegerFunctions2; -} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} 
VkDeviceDiagnosticsConfigCreateInfoNV; + + + +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" + +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; + +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; -#define VK_INTEL_performance_query 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) -#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 1 -#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; -typedef enum VkPerformanceConfigurationTypeINTEL { - VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, - VK_PERFORMANCE_CONFIGURATION_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL, - VK_PERFORMANCE_CONFIGURATION_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL, - VK_PERFORMANCE_CONFIGURATION_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL - VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + 1), - VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceConfigurationTypeINTEL; +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); -typedef enum 
VkQueryPoolSamplingModeINTEL { - VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, - VK_QUERY_POOL_SAMPLING_MODE_BEGIN_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL, - VK_QUERY_POOL_SAMPLING_MODE_END_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL, - VK_QUERY_POOL_SAMPLING_MODE_RANGE_SIZE_INTEL = (VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL - VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + 1), - VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkQueryPoolSamplingModeINTEL; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif -typedef enum VkPerformanceOverrideTypeINTEL { - VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, - VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, - VK_PERFORMANCE_OVERRIDE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, - VK_PERFORMANCE_OVERRIDE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL, - VK_PERFORMANCE_OVERRIDE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL - VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL + 1), - VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceOverrideTypeINTEL; -typedef enum VkPerformanceParameterTypeINTEL { - VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, - VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, - VK_PERFORMANCE_PARAMETER_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, - VK_PERFORMANCE_PARAMETER_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL, - VK_PERFORMANCE_PARAMETER_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL - VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL + 1), - VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceParameterTypeINTEL; +#define VK_NV_ray_tracing_motion_blur 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" -typedef enum VkPerformanceValueTypeINTEL { - VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, - VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, - VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, - VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, - VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, - VK_PERFORMANCE_VALUE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, - VK_PERFORMANCE_VALUE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL, - VK_PERFORMANCE_VALUE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL - VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL + 1), - VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceValueTypeINTEL; -typedef union VkPerformanceValueDataINTEL { - uint32_t value32; - uint64_t value64; - float valueFloat; - VkBool32 valueBool; - const char* valueString; -} VkPerformanceValueDataINTEL; +typedef enum VkAccelerationStructureMotionInstanceTypeNV { + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMotionInstanceTypeNV; +typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV; +typedef VkFlags 
VkAccelerationStructureMotionInstanceFlagsNV; +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; -typedef struct VkPerformanceValueINTEL { - VkPerformanceValueTypeINTEL type; - VkPerformanceValueDataINTEL data; -} VkPerformanceValueINTEL; +typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR vertexData; +} VkAccelerationStructureGeometryMotionTrianglesDataNV; -typedef struct VkInitializePerformanceApiInfoINTEL { +typedef struct VkAccelerationStructureMotionInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t maxInstances; + VkAccelerationStructureMotionInfoFlagsNV flags; +} VkAccelerationStructureMotionInfoNV; + +typedef struct VkAccelerationStructureMatrixMotionInstanceNV { + VkTransformMatrixKHR transformT0; + VkTransformMatrixKHR transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureMatrixMotionInstanceNV; + +typedef struct VkSRTDataNV { + float sx; + float a; + float b; + float pvx; + float sy; + float c; + float pvy; + float sz; + float pvz; + float qx; + float qy; + float qz; + float qw; + float tx; + float ty; + float tz; +} VkSRTDataNV; + +typedef struct VkAccelerationStructureSRTMotionInstanceNV { + VkSRTDataNV transformT0; + VkSRTDataNV transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureSRTMotionInstanceNV; + +typedef union VkAccelerationStructureMotionInstanceDataNV { + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +} VkAccelerationStructureMotionInstanceDataNV; + +typedef struct VkAccelerationStructureMotionInstanceNV { + VkAccelerationStructureMotionInstanceTypeNV type; + VkAccelerationStructureMotionInstanceFlagsNV flags; + VkAccelerationStructureMotionInstanceDataNV data; +} VkAccelerationStructureMotionInstanceNV; + +typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { VkStructureType sType; - const void* pNext; - void* pUserData; -} VkInitializePerformanceApiInfoINTEL; + void* pNext; + VkBool32 rayTracingMotionBlur; + VkBool32 rayTracingMotionBlurPipelineTraceRaysIndirect; +} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; -typedef struct VkQueryPoolCreateInfoINTEL { - VkStructureType sType; - const void* pNext; - VkQueryPoolSamplingModeINTEL performanceCountersSampling; -} VkQueryPoolCreateInfoINTEL; -typedef struct VkPerformanceMarkerInfoINTEL { - VkStructureType sType; - const void* pNext; - uint64_t marker; -} VkPerformanceMarkerInfoINTEL; -typedef struct VkPerformanceStreamMarkerInfoINTEL { +#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { VkStructureType sType; - const void* pNext; - uint32_t marker; -} VkPerformanceStreamMarkerInfoINTEL; - -typedef struct VkPerformanceOverrideInfoINTEL { - VkStructureType sType; - const void* pNext; - VkPerformanceOverrideTypeINTEL type; - 
VkBool32 enable; - uint64_t parameter; -} VkPerformanceOverrideInfoINTEL; + void* pNext; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; -typedef struct VkPerformanceConfigurationAcquireInfoINTEL { - VkStructureType sType; - const void* pNext; - VkPerformanceConfigurationTypeINTEL type; -} VkPerformanceConfigurationAcquireInfoINTEL; -typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); -typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); -typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); -typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); -typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); -typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( - VkDevice device, - const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; -VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( - VkDevice device); +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( - VkCommandBuffer commandBuffer, - const VkPerformanceMarkerInfoINTEL* pMarkerInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( - VkCommandBuffer commandBuffer, - const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( - VkCommandBuffer commandBuffer, - const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; -VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( - 
VkDevice device, - const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, - VkPerformanceConfigurationINTEL* pConfiguration); -VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( - VkDevice device, - VkPerformanceConfigurationINTEL configuration); -VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( - VkQueue queue, - VkPerformanceConfigurationINTEL configuration); +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( - VkDevice device, - VkPerformanceParameterTypeINTEL parameter, - VkPerformanceValueINTEL* pValue); -#endif -#define VK_EXT_pci_bus_info 1 -#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 -#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" -typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { VkStructureType sType; void* pNext; - uint32_t pciDomain; - uint32_t pciBus; - uint32_t pciDevice; - uint32_t pciFunction; -} VkPhysicalDevicePCIBusInfoPropertiesEXT; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; -#define VK_AMD_display_native_hdr 1 -#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 -#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" -typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { +#define VK_ARM_rasterization_order_attachment_access 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" +typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM { VkStructureType sType; - void* pNext; - VkBool32 localDimmingSupport; -} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + const void* pNext; + VkBool32 rasterizationOrderColorAttachmentAccess; + VkBool32 rasterizationOrderDepthAttachmentAccess; + VkBool32 rasterizationOrderStencilAttachmentAccess; +} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; -typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + + +#define VK_EXT_rgba10x6_formats 1 +#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 +#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" +typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { VkStructureType sType; - const void* pNext; - VkBool32 localDimmingEnable; -} VkSwapchainDisplayNativeHdrCreateInfoAMD; + void* pNext; + VkBool32 formatRgba10x6WithoutYCbCrSampler; +} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; -typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); + + +#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( - 
VkDevice device, - VkSwapchainKHR swapChain, - VkBool32 localDimmingEnable); +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); #endif -#define VK_EXT_fragment_density_map 1 -#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 1 -#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" -typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE { + VkStructureType sType; + void* pNext; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +typedef struct VkMutableDescriptorTypeListVALVE { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListVALVE; + +typedef struct VkMutableDescriptorTypeCreateInfoVALVE { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoVALVE; + + + +#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 fragmentDensityMap; - VkBool32 fragmentDensityMapDynamic; - VkBool32 fragmentDensityMapNonSubsampledImages; -} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; -typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; + +typedef struct VkVertexInputAttributeDescription2EXT { VkStructureType sType; void* pNext; - VkExtent2D minFragmentDensityTexelSize; - VkExtent2D maxFragmentDensityTexelSize; - VkBool32 fragmentDensityInvocations; -} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; -typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkAttachmentReference fragmentDensityMapAttachment; -} VkRenderPassFragmentDensityMapCreateInfoEXT; +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif -#define 
VK_EXT_scalar_block_layout 1 -#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" -typedef struct VkPhysicalDeviceScalarBlockLayoutFeaturesEXT { +#define VK_EXT_physical_device_drm 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" +typedef struct VkPhysicalDeviceDrmPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 scalarBlockLayout; -} VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; + VkBool32 hasPrimary; + VkBool32 hasRender; + int64_t primaryMajor; + int64_t primaryMinor; + int64_t renderMajor; + int64_t renderMinor; +} VkPhysicalDeviceDrmPropertiesEXT; -#define VK_GOOGLE_hlsl_functionality1 1 -#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION 1 -#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +#define VK_EXT_depth_clip_control 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" +typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipControl; +} VkPhysicalDeviceDepthClipControlFeaturesEXT; +typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 negativeOneToOne; +} VkPipelineViewportDepthClipControlCreateInfoEXT; -#define VK_GOOGLE_decorate_string 1 -#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 -#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" -#define VK_EXT_subgroup_size_control 1 -#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 -#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" -typedef struct VkPhysicalDeviceSubgroupSizeControlFeaturesEXT { +#define VK_EXT_primitive_topology_list_restart 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" +typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 subgroupSizeControl; - VkBool32 computeFullSubgroups; -} VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; + VkBool32 primitiveTopologyListRestart; + VkBool32 primitiveTopologyPatchListRestart; +} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + -typedef struct VkPhysicalDeviceSubgroupSizeControlPropertiesEXT { - VkStructureType sType; - void* pNext; - uint32_t minSubgroupSize; - uint32_t maxSubgroupSize; - uint32_t maxComputeWorkgroupSubgroups; - VkShaderStageFlags requiredSubgroupSizeStages; -} VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; -typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT { +#define VK_HUAWEI_subpass_shading 1 +#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 2 +#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" +typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { VkStructureType sType; void* pNext; - uint32_t requiredSubgroupSize; -} VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + VkRenderPass renderPass; + uint32_t subpass; +} VkSubpassShadingPipelineCreateInfoHUAWEI; +typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 subpassShading; +} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; +typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI { + VkStructureType sType; + void* 
pNext; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio; +} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; -#define VK_AMD_shader_core_properties2 1 -#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 -#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize); +typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer); -typedef enum VkShaderCorePropertiesFlagBitsAMD { - VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF -} VkShaderCorePropertiesFlagBitsAMD; -typedef VkFlags VkShaderCorePropertiesFlagsAMD; -typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { - VkStructureType sType; - void* pNext; - VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; - uint32_t activeComputeUnitCount; -} VkPhysicalDeviceShaderCoreProperties2AMD; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + VkDevice device, + VkRenderPass renderpass, + VkExtent2D* pMaxWorkgroupSize); +VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( + VkCommandBuffer commandBuffer); +#endif -#define VK_AMD_device_coherent_memory 1 -#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 -#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" -typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { +#define VK_HUAWEI_invocation_mask 1 +#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 +#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" +typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI { VkStructureType sType; void* pNext; - VkBool32 deviceCoherentMemory; -} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + VkBool32 invocationMask; +} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; +typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); - -#define VK_EXT_memory_budget 1 -#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 -#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" -typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { - VkStructureType sType; - void* pNext; - VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; - VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryBudgetPropertiesEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); +#endif +#define VK_NV_external_memory_rdma 1 +typedef void* VkRemoteAddressNV; +#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma" +typedef struct VkMemoryGetRemoteAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetRemoteAddressInfoNV; -#define VK_EXT_memory_priority 1 -#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 -#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" -typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { +typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 memoryPriority; -} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + VkBool32 externalMemoryRDMA; +} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; -typedef struct VkMemoryPriorityAllocateInfoEXT 
{ - VkStructureType sType; - const void* pNext; - float priority; -} VkMemoryPriorityAllocateInfoEXT; +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( + VkDevice device, + const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV* pAddress); +#endif -#define VK_NV_dedicated_allocation_image_aliasing 1 -#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" -typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { +#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 dedicatedAllocationImageAliasing; -} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); -#define VK_EXT_buffer_device_address 1 -typedef uint64_t VkDeviceAddress; -#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 -#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" -typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 bufferDeviceAddress; - VkBool32 bufferDeviceAddressCaptureReplay; - VkBool32 bufferDeviceAddressMultiDevice; -} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); -typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif -typedef struct VkBufferDeviceAddressInfoEXT { + +#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { VkStructureType sType; - const void* pNext; - VkBuffer buffer; -} 
VkBufferDeviceAddressInfoEXT; + void* pNext; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; -typedef struct VkBufferDeviceAddressCreateInfoEXT { +typedef struct VkPipelineColorWriteCreateInfoEXT { VkStructureType sType; const void* pNext; - VkDeviceAddress deviceAddress; -} VkBufferDeviceAddressCreateInfoEXT; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; -typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( - VkDevice device, - const VkBufferDeviceAddressInfoEXT* pInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); #endif -#define VK_EXT_separate_stencil_usage 1 -#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 -#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" -typedef struct VkImageStencilUsageCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkImageUsageFlags stencilUsage; -} VkImageStencilUsageCreateInfoEXT; - - +#define VK_EXT_global_priority_query 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE_KHR +typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; -#define VK_EXT_validation_features 1 -#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 2 -#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" +typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT; -typedef enum VkValidationFeatureEnableEXT { - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, - VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, - VK_VALIDATION_FEATURE_ENABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, - VK_VALIDATION_FEATURE_ENABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, - VK_VALIDATION_FEATURE_ENABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT + 1), - VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationFeatureEnableEXT; -typedef enum VkValidationFeatureDisableEXT { - VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, - VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, - VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, - VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, - VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, - VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, - VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, - VK_VALIDATION_FEATURE_DISABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, - VK_VALIDATION_FEATURE_DISABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, - VK_VALIDATION_FEATURE_DISABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - VK_VALIDATION_FEATURE_DISABLE_ALL_EXT + 1), - VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationFeatureDisableEXT; -typedef struct 
VkValidationFeaturesEXT { - VkStructureType sType; - const void* pNext; - uint32_t enabledValidationFeatureCount; - const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; - uint32_t disabledValidationFeatureCount; - const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; -} VkValidationFeaturesEXT; +#define VK_EXT_image_view_min_lod 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" +typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 minLod; +} VkPhysicalDeviceImageViewMinLodFeaturesEXT; +typedef struct VkImageViewMinLodCreateInfoEXT { + VkStructureType sType; + const void* pNext; + float minLod; +} VkImageViewMinLodCreateInfoEXT; -#define VK_NV_cooperative_matrix 1 -#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 -#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" -typedef enum VkComponentTypeNV { - VK_COMPONENT_TYPE_FLOAT16_NV = 0, - VK_COMPONENT_TYPE_FLOAT32_NV = 1, - VK_COMPONENT_TYPE_FLOAT64_NV = 2, - VK_COMPONENT_TYPE_SINT8_NV = 3, - VK_COMPONENT_TYPE_SINT16_NV = 4, - VK_COMPONENT_TYPE_SINT32_NV = 5, - VK_COMPONENT_TYPE_SINT64_NV = 6, - VK_COMPONENT_TYPE_UINT8_NV = 7, - VK_COMPONENT_TYPE_UINT16_NV = 8, - VK_COMPONENT_TYPE_UINT32_NV = 9, - VK_COMPONENT_TYPE_UINT64_NV = 10, - VK_COMPONENT_TYPE_BEGIN_RANGE_NV = VK_COMPONENT_TYPE_FLOAT16_NV, - VK_COMPONENT_TYPE_END_RANGE_NV = VK_COMPONENT_TYPE_UINT64_NV, - VK_COMPONENT_TYPE_RANGE_SIZE_NV = (VK_COMPONENT_TYPE_UINT64_NV - VK_COMPONENT_TYPE_FLOAT16_NV + 1), - VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkComponentTypeNV; -typedef enum VkScopeNV { - VK_SCOPE_DEVICE_NV = 1, - VK_SCOPE_WORKGROUP_NV = 2, - VK_SCOPE_SUBGROUP_NV = 3, - VK_SCOPE_QUEUE_FAMILY_NV = 5, - VK_SCOPE_BEGIN_RANGE_NV = VK_SCOPE_DEVICE_NV, - VK_SCOPE_END_RANGE_NV = VK_SCOPE_QUEUE_FAMILY_NV, - VK_SCOPE_RANGE_SIZE_NV = (VK_SCOPE_QUEUE_FAMILY_NV - VK_SCOPE_DEVICE_NV + 1), - VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkScopeNV; -typedef struct VkCooperativeMatrixPropertiesNV { - VkStructureType sType; - void* pNext; - uint32_t MSize; - uint32_t NSize; - uint32_t KSize; - VkComponentTypeNV AType; - VkComponentTypeNV BType; - VkComponentTypeNV CType; - VkComponentTypeNV DType; - VkScopeNV scope; -} VkCooperativeMatrixPropertiesNV; +#define VK_EXT_multi_draw 1 +#define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 +#define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" +typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multiDraw; +} VkPhysicalDeviceMultiDrawFeaturesEXT; -typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { +typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 cooperativeMatrix; - VkBool32 cooperativeMatrixRobustBufferAccess; -} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + uint32_t maxMultiDrawCount; +} VkPhysicalDeviceMultiDrawPropertiesEXT; -typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { - VkStructureType sType; - void* pNext; - VkShaderStageFlags cooperativeMatrixSupportedStages; -} VkPhysicalDeviceCooperativeMatrixPropertiesNV; +typedef struct VkMultiDrawInfoEXT { + uint32_t firstVertex; + uint32_t vertexCount; +} VkMultiDrawInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); +typedef struct VkMultiDrawIndexedInfoEXT { + 
uint32_t firstIndex; + uint32_t indexCount; + int32_t vertexOffset; +} VkMultiDrawIndexedInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkCooperativeMatrixPropertiesNV* pProperties); +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT* pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT* pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t* pVertexOffset); #endif -#define VK_NV_coverage_reduction_mode 1 -#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 -#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" +#define VK_EXT_load_store_op_none 1 +#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" -typedef enum VkCoverageReductionModeNV { - VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, - VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, - VK_COVERAGE_REDUCTION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, - VK_COVERAGE_REDUCTION_MODE_END_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV, - VK_COVERAGE_REDUCTION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV - VK_COVERAGE_REDUCTION_MODE_MERGE_NV + 1), - VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoverageReductionModeNV; -typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; -typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + +#define VK_EXT_border_color_swizzle 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" +typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 coverageReductionMode; -} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + VkBool32 borderColorSwizzle; + VkBool32 borderColorSwizzleFromImage; +} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; -typedef struct VkPipelineCoverageReductionStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageReductionStateCreateFlagsNV flags; - VkCoverageReductionModeNV coverageReductionMode; -} VkPipelineCoverageReductionStateCreateInfoNV; +typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkComponentMapping components; + VkBool32 srgb; +} VkSamplerBorderColorComponentMappingCreateInfoEXT; -typedef struct VkFramebufferMixedSamplesCombinationNV { - VkStructureType sType; - void* pNext; - VkCoverageReductionModeNV coverageReductionMode; - VkSampleCountFlagBits rasterizationSamples; - VkSampleCountFlags depthStencilSamples; - VkSampleCountFlags colorSamples; -} VkFramebufferMixedSamplesCombinationNV; -typedef VkResult (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); + +#define VK_EXT_pageable_device_local_memory 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" +typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pageableDeviceLocalMemory; +} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - VkPhysicalDevice physicalDevice, - uint32_t* pCombinationCount, - VkFramebufferMixedSamplesCombinationNV* pCombinations); +VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( + VkDevice device, + VkDeviceMemory memory, + float priority); #endif -#define VK_EXT_fragment_shader_interlock 1 -#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 -#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" -typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { +#define VK_QCOM_fragment_density_map_offset 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { VkStructureType sType; void* pNext; - VkBool32 fragmentShaderSampleInterlock; - VkBool32 fragmentShaderPixelInterlock; - VkBool32 fragmentShaderShadingRateInterlock; -} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + VkBool32 fragmentDensityMapOffset; +} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D fragmentDensityOffsetGranularity; +} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; +typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t fragmentDensityOffsetCount; + const VkOffset2D* pFragmentDensityOffsets; +} VkSubpassFragmentDensityMapOffsetEndInfoQCOM; -#define VK_EXT_ycbcr_image_arrays 1 -#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 -#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" -typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { + + +#define VK_NV_linear_color_attachment 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" +typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 ycbcrImageArrays; -} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; - + VkBool32 linearColorAttachment; +} VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + + +#define VK_GOOGLE_surfaceless_query 1 +#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 1 +#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" + + +#define VK_KHR_acceleration_structure 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 +#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" + +typedef enum 
VkBuildAccelerationStructureModeKHR { + VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureModeKHR; + +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; + +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; + +typedef enum VkAccelerationStructureCreateFlagBitsKHR { + VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004, + VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCreateFlagBitsKHR; +typedef VkFlags VkAccelerationStructureCreateFlagsKHR; +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; +typedef struct VkAccelerationStructureBuildRangeInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildRangeInfoKHR; -#define VK_EXT_headless_surface 1 -#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 0 -#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" -typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; -typedef struct VkHeadlessSurfaceCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkHeadlessSurfaceCreateFlagsEXT flags; -} VkHeadlessSurfaceCreateInfoEXT; +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + uint32_t maxVertex; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( - VkInstance instance, - const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} VkAccelerationStructureGeometryDataKHR; +typedef struct 
VkAccelerationStructureGeometryKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; -#define VK_EXT_line_rasterization 1 -#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 -#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBuildAccelerationStructureModeKHR mode; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* pGeometries; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureCreateFlagsKHR createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkAccelerationStructureTypeKHR type; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; -typedef enum VkLineRasterizationModeEXT { - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1, - VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3, - VK_LINE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, - VK_LINE_RASTERIZATION_MODE_END_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, - VK_LINE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT + 1), - VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkLineRasterizationModeEXT; -typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT { +typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR { VkStructureType sType; void* pNext; - VkBool32 rectangularLines; - VkBool32 bresenhamLines; - VkBool32 smoothLines; - VkBool32 stippledRectangularLines; - VkBool32 stippledBresenhamLines; - VkBool32 stippledSmoothLines; -} VkPhysicalDeviceLineRasterizationFeaturesEXT; + VkBool32 accelerationStructure; + VkBool32 accelerationStructureCaptureReplay; + VkBool32 accelerationStructureIndirectBuild; + VkBool32 accelerationStructureHostCommands; + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind; +} VkPhysicalDeviceAccelerationStructureFeaturesKHR; -typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT { +typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR { VkStructureType sType; void* pNext; - uint32_t lineSubPixelPrecisionBits; -} VkPhysicalDeviceLineRasterizationPropertiesEXT; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxPerStageDescriptorAccelerationStructures; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures; + uint32_t 
minAccelerationStructureScratchOffsetAlignment; +} VkPhysicalDeviceAccelerationStructurePropertiesKHR; -typedef struct VkPipelineRasterizationLineStateCreateInfoEXT { +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { VkStructureType sType; const void* pNext; - VkLineRasterizationModeEXT lineRasterizationMode; - VkBool32 stippledLineEnable; - uint32_t lineStippleFactor; - uint16_t lineStipplePattern; -} VkPipelineRasterizationLineStateCreateInfoEXT; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; -typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); +typedef struct VkAccelerationStructureVersionInfoKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkAccelerationStructureVersionInfoKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef struct VkAccelerationStructureBuildSizesInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts); +typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); 
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, - uint32_t lineStippleFactor, - uint16_t lineStipplePattern); -#endif + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts); -#define VK_EXT_host_query_reset 1 -#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 -#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" -typedef struct VkPhysicalDeviceHostQueryResetFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 hostQueryReset; -} VkPhysicalDeviceHostQueryResetFeaturesEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + 
const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); -typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount); + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); #endif -#define VK_EXT_index_type_uint8 1 -#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 -#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" -typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 indexTypeUint8; -} VkPhysicalDeviceIndexTypeUint8FeaturesEXT; +#define VK_KHR_ray_tracing_pipeline 1 +#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" +typedef enum VkShaderGroupShaderKHR { + VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0, + VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1, + VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2, + VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3, + VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF +} VkShaderGroupShaderKHR; +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + 
VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPipelineRayPayloadSize; + uint32_t maxPipelineRayHitAttributeSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxPipelineRayRecursionDepth; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPipeline; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingPipelineTraceRaysIndirect; + VkBool32 rayTraversalPrimitiveCulling; +} VkPhysicalDeviceRayTracingPipelineFeaturesKHR; -#define VK_EXT_shader_demote_to_helper_invocation 1 -#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 -#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" -typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT { +typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { VkStructureType sType; void* pNext; - VkBool32 shaderDemoteToHelperInvocation; -} VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + uint32_t shaderGroupHandleSize; + uint32_t maxRayRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint32_t shaderGroupHandleCaptureReplaySize; + uint32_t maxRayDispatchInvocationCount; + uint32_t shaderGroupHandleAlignment; + uint32_t maxRayHitAttributeSize; +} VkPhysicalDeviceRayTracingPipelinePropertiesKHR; +typedef struct VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; -#define VK_EXT_texel_buffer_alignment 1 -#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 -#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" -typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 texelBufferAlignment; -} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR 
deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress); +typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader); +typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize); -typedef struct VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT { +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress); + +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( + VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize); +#endif + + +#define VK_KHR_ray_query 1 +#define VK_KHR_RAY_QUERY_SPEC_VERSION 1 +#define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" +typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { VkStructureType sType; void* pNext; - VkDeviceSize storageTexelBufferOffsetAlignmentBytes; - VkBool32 storageTexelBufferOffsetSingleTexelAlignment; - VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; - VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; -} VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; + VkBool32 rayQuery; +} VkPhysicalDeviceRayQueryFeaturesKHR; #ifdef __cplusplus diff --git a/Externals/Vulkan/Include/vulkan/vulkan_directfb.h b/Externals/Vulkan/Include/vulkan/vulkan_directfb.h new file mode 100644 index 
000000000000..ab3504efafd1 --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_directfb.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright 2015-2022 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_enums.hpp b/Externals/Vulkan/Include/vulkan/vulkan_enums.hpp new file mode 100644 index 000000000000..ea2a434aa9dc --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_enums.hpp @@ -0,0 +1,18755 @@ +// Copyright 2015-2022 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#ifndef VULKAN_ENUMS_HPP +#define VULKAN_ENUMS_HPP + +namespace VULKAN_HPP_NAMESPACE +{ + template + struct CppType + {}; + + template + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + }; + + VULKAN_HPP_INLINE std::string toHexString( uint32_t value ) + { + std::stringstream stream; + stream << std::hex << value; + return stream.str(); + } + + //============= + //=== ENUMs === + //============= + + //=== VK_VERSION_1_0 === + + enum class Result + { + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorUnknown = VK_ERROR_UNKNOWN, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorFragmentation = VK_ERROR_FRAGMENTATION, + eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, + eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Result value ) + { + switch ( value ) + { + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return 
"ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorUnknown: return "ErrorUnknown"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorFragmentation: return "ErrorFragmentation"; + case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; + case Result::ePipelineCompileRequired: return "PipelineCompileRequired"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; + case Result::eErrorNotPermittedKHR: return "ErrorNotPermittedKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eThreadIdleKHR: return "ThreadIdleKHR"; + case Result::eThreadDoneKHR: return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + 
ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = 
VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + eDescriptorSetVariableDescriptorCountAllocateInfo = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountLayoutSupport = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + ePhysicalDeviceScalarBlockLayoutFeatures = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES, + ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES, + ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + ePhysicalDeviceShaderTerminateInvocationFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + ePhysicalDevicePipelineCreationCacheControlFeatures = + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + ePipelineShaderStageRequiredSubgroupSizeCreateInfo = + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + ePhysicalDeviceTextureCompressionAstcHdrFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO, + eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + ePhysicalDeviceShaderIntegerDotProductFeatures = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + ePhysicalDeviceShaderIntegerDotProductProperties = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + eDeviceBufferMemoryRequirements = 
VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoProfileKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR, + eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, + eVideoPictureResourceKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR, + eVideoGetMemoryPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR, + eVideoBindMemoryKHR = VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR, + eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, + eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, + eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, + eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, + eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, + eVideoReferenceSlotKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR, + eVideoQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR, + eVideoProfilesKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR, + ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, + eVideoFormatPropertiesKHR = 
VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, + eQueueFamilyQueryResultStatusProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_2_KHR, + eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, + eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, + eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT, + eVideoEncodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT, + eVideoEncodeH264SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT, + eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT, + eVideoEncodeH264NaluSliceEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT, + eVideoEncodeH264EmitPictureParametersEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT, + eVideoEncodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT, + eVideoEncodeH264RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT, + eVideoEncodeH264RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT, + eVideoEncodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT, + eVideoEncodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_EXT, + eVideoEncodeH265SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoEncodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoEncodeH265VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT, + eVideoEncodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT, + eVideoEncodeH265NaluSliceEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_EXT, + eVideoEncodeH265EmitPictureParametersEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_EXT, + eVideoEncodeH265ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_EXT, + eVideoEncodeH265ReferenceListsEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_EXT, + eVideoEncodeH265RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT, + 
eVideoEncodeH265RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT, + eVideoDecodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT, + eVideoDecodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT, + eVideoDecodeH264PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT, + eVideoDecodeH264MvcEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT, + eVideoDecodeH264ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT, + eVideoDecodeH264SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoDecodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, + eRenderingFragmentShadingRateAttachmentInfoKHR = + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + eRenderingFragmentDensityMapAttachmentInfoEXT = + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, + eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, +#if defined( VK_USE_PLATFORM_GGP ) + eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, +#if defined( VK_USE_PLATFORM_VI_NN ) + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, +#endif /*VK_USE_PLATFORM_VI_NN*/ + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + 
eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, + ePipelineRasterizationDepthClipStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, + ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, + eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, + ePerformanceQuerySubmitInfoKHR = 
VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, + eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, + ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, + ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, + eAndroidHardwareBufferFormatProperties2ANDROID = + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eWriteDescriptorSetAccelerationStructureKHR = 
VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceAccelerationStructureFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR, + ePhysicalDeviceAccelerationStructurePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR, + ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR, + ePhysicalDeviceRayTracingPipelinePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, + ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, + ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDevicePortabilitySubsetFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, + ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePipelineViewportShadingRateImageStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT, + eVideoDecodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT, + eVideoDecodeH265SessionParametersCreateInfoEXT = + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT, + eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT, + eVideoDecodeH265ProfileEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT, + eVideoDecodeH265PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT, 
+ eVideoDecodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineVertexInputDivisorStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_GGP ) + ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, + ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, + ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, + ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, + ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, + eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceFragmentDensityMapFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, + eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + ePipelineFragmentShadingRateStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceFragmentShadingRatePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, + ePhysicalDeviceFragmentShadingRateFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, + ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, + ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, + ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT, + ePipelineRasterizationProvokingVertexStateCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT, + ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eSurfaceFullScreenExclusiveInfoEXT = 
VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicStateFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, + ePhysicalDeviceInheritedViewportScissorFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV, + eCommandBufferInheritanceViewportScissorInfoNV = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT, + eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT, + eDeviceMemoryReportCallbackDataEXT = 
VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT, + ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT, + eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT, + ePhysicalDeviceCustomBorderColorPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, + ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR, + ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, + eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, + eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, + ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, + ePipelineFragmentShadingRateEnumStateCreateInfoNV = + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, + eAccelerationStructureGeometryMotionTrianglesDataNV = + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV, + ePhysicalDeviceRayTracingMotionBlurFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV, + eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV, + ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, + ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, + ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + eDirectfbSurfaceCreateInfoEXT = 
VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, + eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, + ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, + eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, + eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, + ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT, + ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT, + ePipelineViewportDepthClipControlCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT, + ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, + eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, + eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA, + eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eBufferCollectionCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA, + eImportMemoryBufferCollectionFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA, + eBufferCollectionImageCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA, + eBufferCollectionPropertiesFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA, + eBufferConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA, + eBufferCollectionBufferCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA, + eImageConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA, + eImageFormatConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA, + eSysmemColorSpaceFUCHSIA = VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA, + eBufferCollectionConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI, + ePhysicalDeviceSubpassShadingFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI, + ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI, + ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI, + eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV, + ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV, + ePhysicalDeviceExtendedDynamicState2FeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenSurfaceCreateInfoQNX = 
VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, + ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, + ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT, + eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT, + ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, + ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT, + ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, + eSamplerBorderColorComponentMappingCreateInfoEXT = + VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, + ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, + ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, + eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, + ePhysicalDeviceLinearColorAttachmentFeaturesNV = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR, + eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2KHR = 
VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + eImageResolve2KHR = 
VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + 
ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR, + ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR, + ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, + ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, + ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePipelineTessellationDomainOriginStateCreateInfoKHR = + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR, + eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionImageFormatPropertiesKHR = + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDescriptionDepthStencilResolveKHR = 
VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( StructureType value ) + { + switch ( value ) + { + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case 
StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return 
"PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: + return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: + return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: + return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: + return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: + return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::ePhysicalDeviceVulkan11Features: return 
"PhysicalDeviceVulkan11Features"; + case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties"; + case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features"; + case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties"; + case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo"; + case StructureType::eAttachmentDescription2: return "AttachmentDescription2"; + case StructureType::eAttachmentReference2: return "AttachmentReference2"; + case StructureType::eSubpassDescription2: return "SubpassDescription2"; + case StructureType::eSubpassDependency2: return "SubpassDependency2"; + case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2"; + case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo"; + case StructureType::eSubpassEndInfo: return "SubpassEndInfo"; + case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures"; + case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties"; + case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features"; + case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features"; + case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: + return "DescriptorSetLayoutBindingFlagsCreateInfo"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures"; + case StructureType::ePhysicalDeviceDescriptorIndexingProperties: + return "PhysicalDeviceDescriptorIndexingProperties"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: + return "DescriptorSetVariableDescriptorCountAllocateInfo"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: + return "DescriptorSetVariableDescriptorCountLayoutSupport"; + case StructureType::ePhysicalDeviceDepthStencilResolveProperties: + return "PhysicalDeviceDepthStencilResolveProperties"; + case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve"; + case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures"; + case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties: + return "PhysicalDeviceSamplerFilterMinmaxProperties"; + case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures"; + case StructureType::ePhysicalDeviceImagelessFramebufferFeatures: + return "PhysicalDeviceImagelessFramebufferFeatures"; + case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo"; + case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo"; + case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo"; + case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures: + return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; + case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures: + return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; + case 
StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures: + return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; + case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout"; + case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout"; + case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreProperties: + return "PhysicalDeviceTimelineSemaphoreProperties"; + case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo"; + case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo"; + case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo"; + case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures: + return "PhysicalDeviceBufferDeviceAddressFeatures"; + case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo"; + case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo"; + case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo"; + case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo"; + case StructureType::ePhysicalDeviceVulkan13Features: return "PhysicalDeviceVulkan13Features"; + case StructureType::ePhysicalDeviceVulkan13Properties: return "PhysicalDeviceVulkan13Properties"; + case StructureType::ePipelineCreationFeedbackCreateInfo: return "PipelineCreationFeedbackCreateInfo"; + case StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures: + return "PhysicalDeviceShaderTerminateInvocationFeatures"; + case StructureType::ePhysicalDeviceToolProperties: return "PhysicalDeviceToolProperties"; + case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures: + return "PhysicalDeviceShaderDemoteToHelperInvocationFeatures"; + case StructureType::ePhysicalDevicePrivateDataFeatures: return "PhysicalDevicePrivateDataFeatures"; + case StructureType::eDevicePrivateDataCreateInfo: return "DevicePrivateDataCreateInfo"; + case StructureType::ePrivateDataSlotCreateInfo: return "PrivateDataSlotCreateInfo"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures: + return "PhysicalDevicePipelineCreationCacheControlFeatures"; + case StructureType::eMemoryBarrier2: return "MemoryBarrier2"; + case StructureType::eBufferMemoryBarrier2: return "BufferMemoryBarrier2"; + case StructureType::eImageMemoryBarrier2: return "ImageMemoryBarrier2"; + case StructureType::eDependencyInfo: return "DependencyInfo"; + case StructureType::eSubmitInfo2: return "SubmitInfo2"; + case StructureType::eSemaphoreSubmitInfo: return "SemaphoreSubmitInfo"; + case StructureType::eCommandBufferSubmitInfo: return "CommandBufferSubmitInfo"; + case StructureType::ePhysicalDeviceSynchronization2Features: return "PhysicalDeviceSynchronization2Features"; + case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures: + return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures"; + case StructureType::ePhysicalDeviceImageRobustnessFeatures: return "PhysicalDeviceImageRobustnessFeatures"; + case StructureType::eCopyBufferInfo2: return "CopyBufferInfo2"; + case 
StructureType::eCopyImageInfo2: return "CopyImageInfo2"; + case StructureType::eCopyBufferToImageInfo2: return "CopyBufferToImageInfo2"; + case StructureType::eCopyImageToBufferInfo2: return "CopyImageToBufferInfo2"; + case StructureType::eBlitImageInfo2: return "BlitImageInfo2"; + case StructureType::eResolveImageInfo2: return "ResolveImageInfo2"; + case StructureType::eBufferCopy2: return "BufferCopy2"; + case StructureType::eImageCopy2: return "ImageCopy2"; + case StructureType::eImageBlit2: return "ImageBlit2"; + case StructureType::eBufferImageCopy2: return "BufferImageCopy2"; + case StructureType::eImageResolve2: return "ImageResolve2"; + case StructureType::ePhysicalDeviceSubgroupSizeControlProperties: + return "PhysicalDeviceSubgroupSizeControlProperties"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo: + return "PipelineShaderStageRequiredSubgroupSizeCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeatures: + return "PhysicalDeviceSubgroupSizeControlFeatures"; + case StructureType::ePhysicalDeviceInlineUniformBlockFeatures: return "PhysicalDeviceInlineUniformBlockFeatures"; + case StructureType::ePhysicalDeviceInlineUniformBlockProperties: + return "PhysicalDeviceInlineUniformBlockProperties"; + case StructureType::eWriteDescriptorSetInlineUniformBlock: return "WriteDescriptorSetInlineUniformBlock"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfo: + return "DescriptorPoolInlineUniformBlockCreateInfo"; + case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures: + return "PhysicalDeviceTextureCompressionAstcHdrFeatures"; + case StructureType::eRenderingInfo: return "RenderingInfo"; + case StructureType::eRenderingAttachmentInfo: return "RenderingAttachmentInfo"; + case StructureType::ePipelineRenderingCreateInfo: return "PipelineRenderingCreateInfo"; + case StructureType::ePhysicalDeviceDynamicRenderingFeatures: return "PhysicalDeviceDynamicRenderingFeatures"; + case StructureType::eCommandBufferInheritanceRenderingInfo: return "CommandBufferInheritanceRenderingInfo"; + case StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures: + return "PhysicalDeviceShaderIntegerDotProductFeatures"; + case StructureType::ePhysicalDeviceShaderIntegerDotProductProperties: + return "PhysicalDeviceShaderIntegerDotProductProperties"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentProperties: + return "PhysicalDeviceTexelBufferAlignmentProperties"; + case StructureType::eFormatProperties3: return "FormatProperties3"; + case StructureType::ePhysicalDeviceMaintenance4Features: return "PhysicalDeviceMaintenance4Features"; + case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties"; + case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements"; + case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case 
StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: + return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoProfileKHR: return "VideoProfileKHR"; + case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR"; + case StructureType::eVideoPictureResourceKHR: return "VideoPictureResourceKHR"; + case StructureType::eVideoGetMemoryPropertiesKHR: return "VideoGetMemoryPropertiesKHR"; + case StructureType::eVideoBindMemoryKHR: return "VideoBindMemoryKHR"; + case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR"; + case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR"; + case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR"; + case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR"; + case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR"; + case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR"; + case StructureType::eVideoReferenceSlotKHR: return "VideoReferenceSlotKHR"; + case StructureType::eVideoQueueFamilyProperties2KHR: return "VideoQueueFamilyProperties2KHR"; + case StructureType::eVideoProfilesKHR: return "VideoProfilesKHR"; + case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR"; + case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR"; + case StructureType::eQueueFamilyQueryResultStatusProperties2KHR: + return "QueueFamilyQueryResultStatusProperties2KHR"; + case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case 
StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: + return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: + return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: + return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; + case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; + case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT"; + case StructureType::eVideoEncodeH264SessionCreateInfoEXT: return "VideoEncodeH264SessionCreateInfoEXT"; + case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT: + return "VideoEncodeH264SessionParametersCreateInfoEXT"; + case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT: + return "VideoEncodeH264SessionParametersAddInfoEXT"; + case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT"; + case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT"; + case StructureType::eVideoEncodeH264NaluSliceEXT: return "VideoEncodeH264NaluSliceEXT"; + case StructureType::eVideoEncodeH264EmitPictureParametersEXT: return "VideoEncodeH264EmitPictureParametersEXT"; + case StructureType::eVideoEncodeH264ProfileEXT: return "VideoEncodeH264ProfileEXT"; + case StructureType::eVideoEncodeH264RateControlInfoEXT: return "VideoEncodeH264RateControlInfoEXT"; + case StructureType::eVideoEncodeH264RateControlLayerInfoEXT: return "VideoEncodeH264RateControlLayerInfoEXT"; + case StructureType::eVideoEncodeH265CapabilitiesEXT: return "VideoEncodeH265CapabilitiesEXT"; + case StructureType::eVideoEncodeH265SessionCreateInfoEXT: return "VideoEncodeH265SessionCreateInfoEXT"; + case StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT: + return "VideoEncodeH265SessionParametersCreateInfoEXT"; + case StructureType::eVideoEncodeH265SessionParametersAddInfoEXT: + return "VideoEncodeH265SessionParametersAddInfoEXT"; + case StructureType::eVideoEncodeH265VclFrameInfoEXT: return "VideoEncodeH265VclFrameInfoEXT"; + case StructureType::eVideoEncodeH265DpbSlotInfoEXT: return "VideoEncodeH265DpbSlotInfoEXT"; + case StructureType::eVideoEncodeH265NaluSliceEXT: return "VideoEncodeH265NaluSliceEXT"; + case StructureType::eVideoEncodeH265EmitPictureParametersEXT: return "VideoEncodeH265EmitPictureParametersEXT"; + case StructureType::eVideoEncodeH265ProfileEXT: return "VideoEncodeH265ProfileEXT"; + case StructureType::eVideoEncodeH265ReferenceListsEXT: return "VideoEncodeH265ReferenceListsEXT"; + case StructureType::eVideoEncodeH265RateControlInfoEXT: return "VideoEncodeH265RateControlInfoEXT"; + case StructureType::eVideoEncodeH265RateControlLayerInfoEXT: return "VideoEncodeH265RateControlLayerInfoEXT"; + case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT"; + case StructureType::eVideoDecodeH264SessionCreateInfoEXT: return "VideoDecodeH264SessionCreateInfoEXT"; + case 
StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT"; + case StructureType::eVideoDecodeH264MvcEXT: return "VideoDecodeH264MvcEXT"; + case StructureType::eVideoDecodeH264ProfileEXT: return "VideoDecodeH264ProfileEXT"; + case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT: + return "VideoDecodeH264SessionParametersCreateInfoEXT"; + case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT: + return "VideoDecodeH264SessionParametersAddInfoEXT"; + case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; + case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: + return "RenderingFragmentShadingRateAttachmentInfoKHR"; + case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: + return "RenderingFragmentDensityMapAttachmentInfoEXT"; + case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; + case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: + return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; +#if defined( VK_USE_PLATFORM_VI_NN ) + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; +#endif /*VK_USE_PLATFORM_VI_NN*/ + case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; + case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: 
return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: + return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: + return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: + return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: + return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: + return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: + return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: + return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: + return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: + return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: + return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: + return "PhysicalDevicePerformanceQueryFeaturesKHR"; + case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: + return "PhysicalDevicePerformanceQueryPropertiesKHR"; + case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR"; + case 
StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR"; + case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR"; + case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR"; + case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: + return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: + return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; + case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: + return "AndroidHardwareBufferFormatProperties2ANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: + return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: + return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: + return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: + return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: + return 
"PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR: + return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR: + return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR: + return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR: + return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR: + return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR: + return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR"; + case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR: + return "PhysicalDeviceAccelerationStructureFeaturesKHR"; + case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR: + return "PhysicalDeviceAccelerationStructurePropertiesKHR"; + case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR"; + case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR"; + case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR: + return "PhysicalDeviceRayTracingPipelineFeaturesKHR"; + case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR: + return "PhysicalDeviceRayTracingPipelinePropertiesKHR"; + case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR"; + case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR"; + case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR"; + case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV: + return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV: + return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; + case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT"; + case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: + return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; + case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: + return "ImageDrmFormatModifierExplicitCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT"; + case 
StructureType::eDrmFormatModifierPropertiesList2EXT: return "DrmFormatModifierPropertiesList2EXT"; + case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR: + return "PhysicalDevicePortabilitySubsetFeaturesKHR"; + case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR: + return "PhysicalDevicePortabilitySubsetPropertiesKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: + return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: + return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: + return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV: return "GeometryNV"; + case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV: return "GeometryAabbNV"; + case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureNV: + return "WriteDescriptorSetAccelerationStructureNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: + return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: + return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: + return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: + return "PhysicalDeviceImageViewImageFormatInfoEXT"; + case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: + return "FilterCubicImageViewImageFormatPropertiesEXT"; + case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT"; + case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: + return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR"; + case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD"; + case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoDecodeH265CapabilitiesEXT: 
return "VideoDecodeH265CapabilitiesEXT"; + case StructureType::eVideoDecodeH265SessionCreateInfoEXT: return "VideoDecodeH265SessionCreateInfoEXT"; + case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT: + return "VideoDecodeH265SessionParametersCreateInfoEXT"; + case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT: + return "VideoDecodeH265SessionParametersAddInfoEXT"; + case StructureType::eVideoDecodeH265ProfileEXT: return "VideoDecodeH265ProfileEXT"; + case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT"; + case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR"; + case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR: + return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR"; + case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR"; + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: + return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: + return "PipelineVertexInputDivisorStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: + return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: + return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV: + return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: + return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: + return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: + return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; + case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; + case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; + case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL"; + case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL"; + case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL"; + case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: 
return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD"; + case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: + return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: + return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: + return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; + case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: + return "PipelineFragmentShadingRateStateCreateInfoKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: + return "PhysicalDeviceFragmentShadingRatePropertiesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: + return "PhysicalDeviceFragmentShadingRateFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR"; + case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; + case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; + case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: + return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT"; + case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT"; + case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT"; + case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT"; + case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR"; + case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV: + return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT: + return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; + case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT"; + case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT"; + case StructureType::ePhysicalDevicePresentWaitFeaturesKHR: return "PhysicalDevicePresentWaitFeaturesKHR"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV: + return "PhysicalDeviceCooperativeMatrixFeaturesNV"; + case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV: + return "PhysicalDeviceCooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV: + return "PhysicalDeviceCoverageReductionModeFeaturesNV"; + case StructureType::ePipelineCoverageReductionStateCreateInfoNV: + return "PipelineCoverageReductionStateCreateInfoNV"; + case 
StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV"; + case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT: + return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; + case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT: + return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; + case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT"; + case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT: + return "PipelineRasterizationProvokingVertexStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT: + return "PhysicalDeviceProvokingVertexPropertiesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT"; + case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT: + return "SurfaceCapabilitiesFullScreenExclusiveEXT"; + case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT: + return "PhysicalDeviceLineRasterizationFeaturesEXT"; + case StructureType::ePipelineRasterizationLineStateCreateInfoEXT: + return "PipelineRasterizationLineStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT: + return "PhysicalDeviceLineRasterizationPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: + return "PhysicalDeviceShaderAtomicFloatFeaturesEXT"; + case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: + return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; + case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: + return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; + case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR"; + case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR"; + case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; + case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; + case StructureType::ePipelineExecutableInternalRepresentationKHR: + return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: + return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: + return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; + case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV"; + case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV"; + case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV"; + case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV"; + case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV: + return "GeneratedCommandsMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV: + return 
"PhysicalDeviceDeviceGeneratedCommandsFeaturesNV"; + case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV: + return "PhysicalDeviceInheritedViewportScissorFeaturesNV"; + case StructureType::eCommandBufferInheritanceViewportScissorInfoNV: + return "CommandBufferInheritanceViewportScissorInfoNV"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: + return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; + case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: + return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; + case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT: + return "PhysicalDeviceDeviceMemoryReportFeaturesEXT"; + case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT"; + case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT"; + case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT"; + case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT"; + case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: + return "PhysicalDeviceCustomBorderColorPropertiesEXT"; + case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: + return "PhysicalDeviceCustomBorderColorFeaturesEXT"; + case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePresentIdKHR: return "PresentIdKHR"; + case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR"; + case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR"; + case StructureType::eVideoEncodeRateControlLayerInfoKHR: return "VideoEncodeRateControlLayerInfoKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: + return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; + case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV"; + case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; + case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; + case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR: + return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: + return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV: + return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV"; + case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV: + return "PipelineFragmentShadingRateEnumStateCreateInfoNV"; + case StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV: + return "AccelerationStructureGeometryMotionTrianglesDataNV"; + case StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV: + return "PhysicalDeviceRayTracingMotionBlurFeaturesNV"; + case StructureType::eAccelerationStructureMotionInfoNV: return "AccelerationStructureMotionInfoNV"; + case 
StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: + return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT: + return "PhysicalDeviceFragmentDensityMap2FeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT: + return "PhysicalDeviceFragmentDensityMap2PropertiesEXT"; + case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM"; + case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: + return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR"; + case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM: + return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM"; + case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT"; +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT"; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE: + return "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE"; + case StructureType::eMutableDescriptorTypeCreateInfoVALVE: return "MutableDescriptorTypeCreateInfoVALVE"; + case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT: + return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT"; + case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT"; + case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT"; + case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT"; + case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: + return "PhysicalDeviceDepthClipControlFeaturesEXT"; + case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: + return "PipelineViewportDepthClipControlCreateInfoEXT"; + case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: + return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA"; + case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA"; + case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA"; + case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA"; + case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA"; + case StructureType::eBufferCollectionCreateInfoFUCHSIA: return "BufferCollectionCreateInfoFUCHSIA"; + case StructureType::eImportMemoryBufferCollectionFUCHSIA: return "ImportMemoryBufferCollectionFUCHSIA"; + case StructureType::eBufferCollectionImageCreateInfoFUCHSIA: return "BufferCollectionImageCreateInfoFUCHSIA"; + case StructureType::eBufferCollectionPropertiesFUCHSIA: return "BufferCollectionPropertiesFUCHSIA"; + case StructureType::eBufferConstraintsInfoFUCHSIA: return "BufferConstraintsInfoFUCHSIA"; + case StructureType::eBufferCollectionBufferCreateInfoFUCHSIA: return "BufferCollectionBufferCreateInfoFUCHSIA"; + case StructureType::eImageConstraintsInfoFUCHSIA: return "ImageConstraintsInfoFUCHSIA"; + case 
StructureType::eImageFormatConstraintsInfoFUCHSIA: return "ImageFormatConstraintsInfoFUCHSIA"; + case StructureType::eSysmemColorSpaceFUCHSIA: return "SysmemColorSpaceFUCHSIA"; + case StructureType::eBufferCollectionConstraintsInfoFUCHSIA: return "BufferCollectionConstraintsInfoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case StructureType::eSubpassShadingPipelineCreateInfoHUAWEI: return "SubpassShadingPipelineCreateInfoHUAWEI"; + case StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI: + return "PhysicalDeviceSubpassShadingFeaturesHUAWEI"; + case StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI: + return "PhysicalDeviceSubpassShadingPropertiesHUAWEI"; + case StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI: + return "PhysicalDeviceInvocationMaskFeaturesHUAWEI"; + case StructureType::eMemoryGetRemoteAddressInfoNV: return "MemoryGetRemoteAddressInfoNV"; + case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV: + return "PhysicalDeviceExternalMemoryRdmaFeaturesNV"; + case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT: + return "PhysicalDeviceExtendedDynamicState2FeaturesEXT"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT: + return "PhysicalDeviceColorWriteEnableFeaturesEXT"; + case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT"; + case StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT: return "PhysicalDeviceImageViewMinLodFeaturesEXT"; + case StructureType::eImageViewMinLodCreateInfoEXT: return "ImageViewMinLodCreateInfoEXT"; + case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT"; + case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT"; + case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT: + return "PhysicalDeviceBorderColorSwizzleFeaturesEXT"; + case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: + return "SamplerBorderColorComponentMappingCreateInfoEXT"; + case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: + return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM: + return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM"; + case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM: + return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM"; + case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM: + return "SubpassFragmentDensityMapOffsetEndInfoQCOM"; + case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: + return "PhysicalDeviceLinearColorAttachmentFeaturesNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ObjectType + { + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = 
VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR, + eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX, + eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, + ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, + eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ObjectType value ) + { + switch ( value ) + { + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: 
return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::ePrivateDataSlot: return "PrivateDataSlot"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ObjectType::eVideoSessionKHR: return "VideoSessionKHR"; + case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ObjectType::eCuModuleNVX: return "CuModuleNVX"; + case ObjectType::eCuFunctionNVX: return "CuFunctionNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VendorId + { + eVIV = VK_VENDOR_ID_VIV, + eVSI = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN, + eCodeplay = VK_VENDOR_ID_CODEPLAY, + eMESA = VK_VENDOR_ID_MESA, + ePocl = VK_VENDOR_ID_POCL + }; + + VULKAN_HPP_INLINE std::string to_string( VendorId value ) + { + switch ( value ) + { + case VendorId::eVIV: return "VIV"; + case VendorId::eVSI: return "VSI"; + case VendorId::eKazan: return "Kazan"; + case VendorId::eCodeplay: return "Codeplay"; + case VendorId::eMESA: return "MESA"; + case VendorId::ePocl: return "Pocl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCacheHeaderVersion + { + eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value ) + { + switch ( value ) + { + case PipelineCacheHeaderVersion::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class Format + { + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = 
VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, + eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, + eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, 
+ eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock 
= VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + 
eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + eG8B8R82Plane444Unorm = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM, + eG10X6B10X6R10X62Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X42Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16, + eG16B16R162Plane444Unorm = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, + eA4R4G4B4UnormPack16 = VK_FORMAT_A4R4G4B4_UNORM_PACK16, + eA4B4G4R4UnormPack16 = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + eAstc4x4SfloatBlock = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, + eAstc5x4SfloatBlock = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, + eAstc5x5SfloatBlock = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, + eAstc6x5SfloatBlock = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK, + eAstc6x6SfloatBlock = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK, + eAstc8x5SfloatBlock = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK, + eAstc8x6SfloatBlock = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK, + eAstc8x8SfloatBlock = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK, + eAstc10x5SfloatBlock = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK, + eAstc10x6SfloatBlock = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK, + eAstc10x8SfloatBlock = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK, + eAstc10x10SfloatBlock = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK, + eAstc12x10SfloatBlock = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK, + eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, + eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, + eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, + eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, + eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, + eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, + eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, + eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, + eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, + eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, + eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, + eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, + eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, + eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, + eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, + eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + 
eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, + eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, + eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, + eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( Format value ) + { + switch ( value ) + { + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + 
case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case 
Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case 
Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case 
Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::eG8B8R82Plane444Unorm: return "G8B8R82Plane444Unorm"; + case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "G10X6B10X6R10X62Plane444Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "G12X4B12X4R12X42Plane444Unorm3Pack16"; + case Format::eG16B16R162Plane444Unorm: return "G16B16R162Plane444Unorm"; + case Format::eA4R4G4B4UnormPack16: return "A4R4G4B4UnormPack16"; + case Format::eA4B4G4R4UnormPack16: return "A4B4G4R4UnormPack16"; + case Format::eAstc4x4SfloatBlock: return "Astc4x4SfloatBlock"; + case Format::eAstc5x4SfloatBlock: return "Astc5x4SfloatBlock"; + case Format::eAstc5x5SfloatBlock: return "Astc5x5SfloatBlock"; + case Format::eAstc6x5SfloatBlock: return "Astc6x5SfloatBlock"; + case Format::eAstc6x6SfloatBlock: return "Astc6x6SfloatBlock"; + case Format::eAstc8x5SfloatBlock: return "Astc8x5SfloatBlock"; + case Format::eAstc8x6SfloatBlock: return "Astc8x6SfloatBlock"; + case Format::eAstc8x8SfloatBlock: return "Astc8x8SfloatBlock"; + case Format::eAstc10x5SfloatBlock: return "Astc10x5SfloatBlock"; + case Format::eAstc10x6SfloatBlock: return "Astc10x6SfloatBlock"; + case Format::eAstc10x8SfloatBlock: return "Astc10x8SfloatBlock"; + case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock"; + case Format::eAstc12x10SfloatBlock: return 
"Astc12x10SfloatBlock"; + case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FormatFeatureFlagBits : VkFormatFeatureFlags + { + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilter = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eDisjointKHR = 
VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + eSampledImageYcbcrConversionLinearFilterKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value ) + { + switch ( value ) + { + case FormatFeatureFlagBits::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: + return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: + return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: + return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: + return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; + case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; + case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case 
FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; + case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageCreateFlagBits : VkImageCreateFlags + { + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, + eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, + eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value ) + { + switch ( value ) + { + case ImageCreateFlagBits::eSparseBinding: return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency: return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased: return "SparseAliased"; + case ImageCreateFlagBits::eMutableFormat: return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible"; + case ImageCreateFlagBits::eAlias: return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected: return "Protected"; + case ImageCreateFlagBits::eDisjoint: return "Disjoint"; + case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; + case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM: return "FragmentDensityMapOffsetQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageTiling + { + eOptimal = 
VK_IMAGE_TILING_OPTIMAL, + eLinear = VK_IMAGE_TILING_LINEAR, + eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageTiling value ) + { + switch ( value ) + { + case ImageTiling::eOptimal: return "Optimal"; + case ImageTiling::eLinear: return "Linear"; + case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageType + { + e1D = VK_IMAGE_TYPE_1D, + e2D = VK_IMAGE_TYPE_2D, + e3D = VK_IMAGE_TYPE_3D + }; + + VULKAN_HPP_INLINE std::string to_string( ImageType value ) + { + switch ( value ) + { + case ImageType::e1D: return "1D"; + case ImageType::e2D: return "2D"; + case ImageType::e3D: return "3D"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageUsageFlagBits : VkImageUsageFlags + { + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value ) + { + switch ( value ) + { + case ImageUsageFlagBits::eTransferSrc: return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst: return "TransferDst"; + case ImageUsageFlagBits::eSampled: return "Sampled"; + case ImageUsageFlagBits::eStorage: return "Storage"; + case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case 
ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class InternalAllocationType + { + eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + }; + + VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value ) + { + switch ( value ) + { + case InternalAllocationType::eExecutable: return "Executable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryHeapFlagBits : VkMemoryHeapFlags + { + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value ) + { + switch ( value ) + { + case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags + { + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, + eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, + eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD, + eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value ) + { + switch ( value ) + { + case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible: return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached: return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated"; + case MemoryPropertyFlagBits::eProtected: return "Protected"; + case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD"; + case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD"; + case MemoryPropertyFlagBits::eRdmaCapableNV: return "RdmaCapableNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PhysicalDeviceType + { + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + }; + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value ) + { + switch ( value ) + { + case PhysicalDeviceType::eOther: return "Other"; + case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu"; + case PhysicalDeviceType::eCpu: return "Cpu"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
QueueFlagBits : VkQueueFlags + { + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, + eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, + eProtected = VK_QUEUE_PROTECTED_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR, + eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value ) + { + switch ( value ) + { + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR"; + case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SampleCountFlagBits : VkSampleCountFlags + { + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, + e16 = VK_SAMPLE_COUNT_16_BIT, + e32 = VK_SAMPLE_COUNT_32_BIT, + e64 = VK_SAMPLE_COUNT_64_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value ) + { + switch ( value ) + { + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case SampleCountFlagBits::e64: return "64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SystemAllocationScope + { + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value ) + { + switch ( value ) + { + case SystemAllocationScope::eCommand: return "Command"; + case SystemAllocationScope::eObject: return "Object"; + case SystemAllocationScope::eCache: return "Cache"; + case SystemAllocationScope::eDevice: return "Device"; + case SystemAllocationScope::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class InstanceCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits ) + { + return "(void)"; + } + + enum class DeviceCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineStageFlagBits : VkPipelineStageFlags + { + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = 
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eNone = VK_PIPELINE_STAGE_NONE, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) + { + switch ( value ) + { + case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits::eTransfer: return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits::eHost: return "Host"; + case PipelineStageFlagBits::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits::eNone: return "None"; + case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; + case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV"; + case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV"; + case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case 
PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryMapFlagBits : VkMemoryMapFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) + { + return "(void)"; + } + + enum class ImageAspectFlagBits : VkImageAspectFlags + { + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, + eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, + eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, + eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT, + eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR, + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) + { + switch ( value ) + { + case ImageAspectFlagBits::eColor: return "Color"; + case ImageAspectFlagBits::eDepth: return "Depth"; + case ImageAspectFlagBits::eStencil: return "Stencil"; + case ImageAspectFlagBits::eMetadata: return "Metadata"; + case ImageAspectFlagBits::ePlane0: return "Plane0"; + case ImageAspectFlagBits::ePlane1: return "Plane1"; + case ImageAspectFlagBits::ePlane2: return "Plane2"; + case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT"; + case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT"; + case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT"; + case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT"; + case ImageAspectFlagBits::eNoneKHR: return "NoneKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags + { + eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, + eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, + eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value ) + { + switch ( value ) + { + case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail"; + case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize"; + case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags + { + eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value ) + { + switch ( value ) + { + case SparseMemoryBindFlagBits::eMetadata: return "Metadata"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FenceCreateFlagBits : VkFenceCreateFlags + { + eSignaled = VK_FENCE_CREATE_SIGNALED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value ) + { + switch ( value ) + { + case FenceCreateFlagBits::eSignaled: return "Signaled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum 
class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits ) + { + return "(void)"; + } + + enum class EventCreateFlagBits : VkEventCreateFlags + { + eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT, + eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value ) + { + switch ( value ) + { + case EventCreateFlagBits::eDeviceOnly: return "DeviceOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags + { + eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, + eTessellationEvaluationShaderInvocations = + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value ) + { + switch ( value ) + { + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: + return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryResultFlagBits : VkQueryResultFlags + { + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, + eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + ePartial = VK_QUERY_RESULT_PARTIAL_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value ) + { + switch ( value ) + { + case QueryResultFlagBits::e64: 
return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryType + { + eOcclusion = VK_QUERY_TYPE_OCCLUSION, + ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, + ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( QueryType value ) + { + switch ( value ) + { + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; + case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR"; + case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR"; + case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR"; + case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV"; + case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryPoolCreateFlagBits + { + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits ) + { + return "(void)"; + } + + enum class BufferCreateFlagBits : VkBufferCreateFlags + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, + eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value ) + { + switch ( value ) + { + case BufferCreateFlagBits::eSparseBinding: return 
"SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BufferUsageFlagBits : VkBufferUsageFlags + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, + eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) + { + switch ( value ) + { + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case 
BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: + return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SharingMode + { + eExclusive = VK_SHARING_MODE_EXCLUSIVE, + eConcurrent = VK_SHARING_MODE_CONCURRENT + }; + + VULKAN_HPP_INLINE std::string to_string( SharingMode value ) + { + switch ( value ) + { + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BufferViewCreateFlagBits : VkBufferViewCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) + { + return "(void)"; + } + + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, + eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, + eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyOptimalKHR = 
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageLayout value ) + { + switch ( value ) + { + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal"; + case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal"; + case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal"; + case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; + case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal"; + case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal"; + case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; + case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) + { + switch ( value ) + { + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; + default: return "invalid 
( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageViewCreateFlagBits : VkImageViewCreateFlags + { + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, + eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value ) + { + switch ( value ) + { + case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT"; + case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewType value ) + { + switch ( value ) + { + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits ) + { + return "(void)"; + } + + enum class BlendFactor + { + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + }; + + VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) + { + switch ( value ) + { + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case 
BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BlendOp + { + eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOp value ) + { + switch ( value ) + { + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return 
"XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case BlendOp::ePlusEXT: return "PlusEXT"; + case BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ColorComponentFlagBits : VkColorComponentFlags + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value ) + { + switch ( value ) + { + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + VULKAN_HPP_INLINE std::string to_string( CompareOp value ) + { + switch ( value ) + { + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CullModeFlagBits : VkCullModeFlags + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = 
VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value ) + { + switch ( value ) + { + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return "Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eCullMode = VK_DYNAMIC_STATE_CULL_MODE, + eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE, + ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP, + eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, + ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, + eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, + eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, + eStencilTestEnableEXT = 
VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DynamicState value ) + { + switch ( value ) + { + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eCullMode: return "CullMode"; + case DynamicState::eFrontFace: return "FrontFace"; + case DynamicState::ePrimitiveTopology: return "PrimitiveTopology"; + case DynamicState::eViewportWithCount: return "ViewportWithCount"; + case DynamicState::eScissorWithCount: return "ScissorWithCount"; + case DynamicState::eVertexInputBindingStride: return "VertexInputBindingStride"; + case DynamicState::eDepthTestEnable: return "DepthTestEnable"; + case DynamicState::eDepthWriteEnable: return "DepthWriteEnable"; + case DynamicState::eDepthCompareOp: return "DepthCompareOp"; + case DynamicState::eDepthBoundsTestEnable: return "DepthBoundsTestEnable"; + case DynamicState::eStencilTestEnable: return "StencilTestEnable"; + case DynamicState::eStencilOp: return "StencilOp"; + case DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable"; + case DynamicState::eDepthBiasEnable: return "DepthBiasEnable"; + case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR"; + case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; + case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR"; + case DynamicState::eLineStippleEXT: return "LineStippleEXT"; + case DynamicState::eVertexInputEXT: return "VertexInputEXT"; + case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT"; + case DynamicState::eLogicOpEXT: return "LogicOpEXT"; + case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + VULKAN_HPP_INLINE std::string to_string( FrontFace value ) + { + switch ( value ) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = 
VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + VULKAN_HPP_INLINE std::string to_string( LogicOp value ) + { + switch ( value ) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCreateFlagBits : VkPipelineCreateFlags + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + 
eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: + return "RenderingFragmentShadingRateAttachmentKHR"; + case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: + return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: + return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: + return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) + { + switch ( value ) + { + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroups: return "RequireFullSubgroups"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PolygonMode + { + 
eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, + eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PolygonMode value ) + { + switch ( value ) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PrimitiveTopology + { + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + }; + + VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value ) + { + switch ( value ) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderStageFlagBits : VkShaderStageFlags + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) + { + switch ( value ) + { + case 
ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR: return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR: return "CallableKHR"; + case ShaderStageFlagBits::eTaskNV: return "TaskNV"; + case ShaderStageFlagBits::eMeshNV: return "MeshNV"; + case ShaderStageFlagBits::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + VULKAN_HPP_INLINE std::string to_string( StencilOp value ) + { + switch ( value ) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) + { + switch ( value ) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags + { 
+ }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, + eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BorderColor value ) + { + switch ( value ) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + case BorderColor::eFloatCustomEXT: return "FloatCustomEXT"; + case BorderColor::eIntCustomEXT: return "IntCustomEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicIMG = VK_FILTER_CUBIC_IMG, + eCubicEXT = VK_FILTER_CUBIC_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Filter value ) + { + switch ( value ) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicIMG: return "CubicIMG"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerAddressMode + { + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value ) + { + switch ( value ) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerCreateFlagBits : VkSamplerCreateFlags + { + eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, + 
eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value ) + { + switch ( value ) + { + case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerMipmapMode + { + eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + }; + + VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value ) + { + switch ( value ) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorPoolCreateFlagBits::eHostOnlyVALVE: return "HostOnlyVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags + { + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE: return "HostOnlyPoolVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureNV = 
VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) + { + switch ( value ) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlock: return "InlineUniformBlock"; + case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case DescriptorType::eMutableVALVE: return "MutableVALVE"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) + { + return "(void)"; + } + + enum class AccessFlagBits : VkAccessFlags + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eNone = VK_ACCESS_NONE, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + eAccelerationStructureReadNV = 
VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eNoneKHR = VK_ACCESS_NONE_KHR, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) + { + switch ( value ) + { + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eNone: return "None"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; + case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) + { + switch ( value ) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) + { + switch ( value ) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + case 
AttachmentLoadOp::eNoneEXT: return "NoneEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, + eNone = VK_ATTACHMENT_STORE_OP_NONE, + eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT, + eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR, + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) + { + switch ( value ) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + case AttachmentStoreOp::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DependencyFlagBits : VkDependencyFlags + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) + { + switch ( value ) + { + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags + { + eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value ) + { + switch ( value ) + { + case FramebufferCreateFlagBits::eImageless: return "Imageless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineBindPoint + { + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI, + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) + { + switch ( value ) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; + case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; + case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags + { + eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM + }; + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value ) + { + switch ( value ) + { + case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, + eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, + eShaderResolveQCOM = 
VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, + eRasterizationOrderAttachmentColorAccessARM = + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentDepthAccessARM = + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessARM = + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + { + switch ( value ) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM"; + case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessARM: + return "RasterizationOrderAttachmentColorAccessARM"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessARM: + return "RasterizationOrderAttachmentDepthAccessARM"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessARM: + return "RasterizationOrderAttachmentStencilAccessARM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) + { + switch ( value ) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value ) + { + switch ( value ) + { + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value ) + { + switch ( value ) + { + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value ) + { + switch ( value ) + { + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags + { + eOneTimeSubmit = 
VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) + { + switch ( value ) + { + case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryControlFlagBits : VkQueryControlFlags + { + ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value ) + { + switch ( value ) + { + case QueryControlFlagBits::ePrecise: return "Precise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class IndexType + { + eUint16 = VK_INDEX_TYPE_UINT16, + eUint32 = VK_INDEX_TYPE_UINT32, + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT, + eNoneNV = VK_INDEX_TYPE_NONE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndexType value ) + { + switch ( value ) + { + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + case IndexType::eNoneKHR: return "NoneKHR"; + case IndexType::eUint8EXT: return "Uint8EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class StencilFaceFlagBits : VkStencilFaceFlags + { + eFront = VK_STENCIL_FACE_FRONT_BIT, + eBack = VK_STENCIL_FACE_BACK_BIT, + eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, + eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK + }; + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value ) + { + switch ( value ) + { + case StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SubpassContents + { + eInline = VK_SUBPASS_CONTENTS_INLINE, + eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassContents value ) + { + switch ( value ) + { + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_1 === + + enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags + { + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, + eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, + eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) + { + switch ( value ) + { + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case 
SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT + }; + using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value ) + { + switch ( value ) + { + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress"; + case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits ) + { + return "(void)"; + } + + enum class PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + using PointClippingBehaviorKHR = PointClippingBehavior; + + VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value ) + { + switch ( value ) + { + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class TessellationDomainOrigin + { + eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT + }; + using TessellationDomainOriginKHR = TessellationDomainOrigin; + + VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value ) + { + switch ( value ) + { + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + 
VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) + { + switch ( value ) + { + case DeviceQueueCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerYcbcrModelConversion + { + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + }; + using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; + + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value ) + { + switch ( value ) + { + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerYcbcrRange + { + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW + }; + using SamplerYcbcrRangeKHR = SamplerYcbcrRange; + + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value ) + { + switch ( value ) + { + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ChromaLocation + { + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT + }; + using ChromaLocationKHR = ChromaLocation; + + VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) + { + switch ( value ) + { + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) + { + switch ( value ) + { + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorUpdateTemplateCreateFlagBits : VkDescriptorUpdateTemplateCreateFlags + { + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) + { + return "(void)"; + } + + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV + }; + using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV: return "RdmaAddressNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + using 
ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FenceImportFlagBits : VkFenceImportFlags + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + using FenceImportFlagBitsKHR = FenceImportFlagBits; + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value ) + { + switch ( value ) + { + case FenceImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags + { + eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT + }; + using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value ) + { + switch ( value ) + { + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT + }; + using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + 
VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_2 === + + enum class DriverId + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, + eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, + eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY, + eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY, + eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP, + eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV, + eMesaPanvk = VK_DRIVER_ID_MESA_PANVK, + eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY, + eMesaVenus = VK_DRIVER_ID_MESA_VENUS + }; + using DriverIdKHR = DriverId; + + VULKAN_HPP_INLINE std::string to_string( DriverId value ) + { + switch ( value ) + { + case DriverId::eAmdProprietary: return "AmdProprietary"; + case DriverId::eAmdOpenSource: return "AmdOpenSource"; + case DriverId::eMesaRadv: return "MesaRadv"; + case DriverId::eNvidiaProprietary: return "NvidiaProprietary"; + case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows"; + case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA"; + case DriverId::eImaginationProprietary: return "ImaginationProprietary"; + case DriverId::eQualcommProprietary: return "QualcommProprietary"; + case DriverId::eArmProprietary: return "ArmProprietary"; + case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader"; + case DriverId::eGgpProprietary: return "GgpProprietary"; + case DriverId::eBroadcomProprietary: return "BroadcomProprietary"; + case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe"; + case DriverId::eMoltenvk: return "Moltenvk"; + case DriverId::eCoreaviProprietary: return "CoreaviProprietary"; + case DriverId::eJuiceProprietary: return "JuiceProprietary"; + case DriverId::eVerisiliconProprietary: return "VerisiliconProprietary"; + case DriverId::eMesaTurnip: return "MesaTurnip"; + case DriverId::eMesaV3Dv: return "MesaV3Dv"; + case DriverId::eMesaPanvk: return "MesaPanvk"; + case DriverId::eSamsungProprietary: return "SamsungProprietary"; + case DriverId::eMesaVenus: return "MesaVenus"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
ShaderFloatControlsIndependence + { + e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE + }; + using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; + + VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value ) + { + switch ( value ) + { + case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly"; + case ShaderFloatControlsIndependence::eAll: return "All"; + case ShaderFloatControlsIndependence::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT + }; + using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value ) + { + switch ( value ) + { + case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound"; + case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ResolveModeFlagBits : VkResolveModeFlags + { + eNone = VK_RESOLVE_MODE_NONE, + eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, + eMin = VK_RESOLVE_MODE_MIN_BIT, + eMax = VK_RESOLVE_MODE_MAX_BIT + }; + using ResolveModeFlagBitsKHR = ResolveModeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value ) + { + switch ( value ) + { + case ResolveModeFlagBits::eNone: return "None"; + case ResolveModeFlagBits::eSampleZero: return "SampleZero"; + case ResolveModeFlagBits::eAverage: return "Average"; + case ResolveModeFlagBits::eMin: return "Min"; + case ResolveModeFlagBits::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SamplerReductionMode + { + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX + }; + using SamplerReductionModeEXT = SamplerReductionMode; + + VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value ) + { + switch ( value ) + { + case SamplerReductionMode::eWeightedAverage: return "WeightedAverage"; + case SamplerReductionMode::eMin: return "Min"; + case SamplerReductionMode::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SemaphoreType + { + eBinary = VK_SEMAPHORE_TYPE_BINARY, + eTimeline = VK_SEMAPHORE_TYPE_TIMELINE + }; + using SemaphoreTypeKHR = SemaphoreType; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreType value ) + { + switch ( value ) + { + case SemaphoreType::eBinary: return "Binary"; + case SemaphoreType::eTimeline: return "Timeline"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; 
+ } + } + + enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags + { + eAny = VK_SEMAPHORE_WAIT_ANY_BIT + }; + using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value ) + { + switch ( value ) + { + case SemaphoreWaitFlagBits::eAny: return "Any"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_3 === + + enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags + { + eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT + }; + using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBits value ) + { + switch ( value ) + { + case PipelineCreationFeedbackFlagBits::eValid: return "Valid"; + case PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit"; + case PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration: return "BasePipelineAcceleration"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ToolPurposeFlagBits : VkToolPurposeFlags + { + eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT, + eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT, + eTracing = VK_TOOL_PURPOSE_TRACING_BIT, + eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT, + eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT, + eDebugReportingEXT = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT, + eDebugMarkersEXT = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT + }; + using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBits value ) + { + switch ( value ) + { + case ToolPurposeFlagBits::eValidation: return "Validation"; + case ToolPurposeFlagBits::eProfiling: return "Profiling"; + case ToolPurposeFlagBits::eTracing: return "Tracing"; + case ToolPurposeFlagBits::eAdditionalFeatures: return "AdditionalFeatures"; + case ToolPurposeFlagBits::eModifyingFeatures: return "ModifyingFeatures"; + case ToolPurposeFlagBits::eDebugReportingEXT: return "DebugReportingEXT"; + case ToolPurposeFlagBits::eDebugMarkersEXT: return "DebugMarkersEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags + { + }; + using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineStageFlagBits2 : VkPipelineStageFlags2 + { + eNone = VK_PIPELINE_STAGE_2_NONE, + eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = 
VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT, + eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_2_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT, + eCopy = VK_PIPELINE_STAGE_2_COPY_BIT, + eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT, + eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT, + eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT, + eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT, + eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT, + ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR, + eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, + eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV, + eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, + eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV, + eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV, + eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, + eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV, + eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, + eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT + }; + using PipelineStageFlagBits2KHR = PipelineStageFlagBits2; + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2 value ) + { + switch ( value ) + { + case PipelineStageFlagBits2::eNone: return "None"; + case PipelineStageFlagBits2::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits2::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits2::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits2::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits2::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits2::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits2::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits2::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits2::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits2::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits2::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits2::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits2::eAllTransfer: return "AllTransfer"; + case PipelineStageFlagBits2::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits2::eHost: return "Host"; + case PipelineStageFlagBits2::eAllGraphics: return "AllGraphics"; + case 
PipelineStageFlagBits2::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits2::eCopy: return "Copy"; + case PipelineStageFlagBits2::eResolve: return "Resolve"; + case PipelineStageFlagBits2::eBlit: return "Blit"; + case PipelineStageFlagBits2::eClear: return "Clear"; + case PipelineStageFlagBits2::eIndexInput: return "IndexInput"; + case PipelineStageFlagBits2::eVertexAttributeInput: return "VertexAttributeInput"; + case PipelineStageFlagBits2::ePreRasterizationShaders: return "PreRasterizationShaders"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineStageFlagBits2::eVideoDecodeKHR: return "VideoDecodeKHR"; + case PipelineStageFlagBits2::eVideoEncodeKHR: return "VideoEncodeKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineStageFlagBits2::eTransformFeedbackEXT: return "TransformFeedbackEXT"; + case PipelineStageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case PipelineStageFlagBits2::eCommandPreprocessNV: return "CommandPreprocessNV"; + case PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case PipelineStageFlagBits2::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; + case PipelineStageFlagBits2::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; + case PipelineStageFlagBits2::eTaskShaderNV: return "TaskShaderNV"; + case PipelineStageFlagBits2::eMeshShaderNV: return "MeshShaderNV"; + case PipelineStageFlagBits2::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccessFlagBits2 : VkAccessFlags2 + { + eNone = VK_ACCESS_2_NONE, + eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_2_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_2_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_2_HOST_READ_BIT, + eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT, + eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT, + eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT, + eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, + eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, + eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, + eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = 
VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, + eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI, + eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV + }; + using AccessFlagBits2KHR = AccessFlagBits2; + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2 value ) + { + switch ( value ) + { + case AccessFlagBits2::eNone: return "None"; + case AccessFlagBits2::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits2::eIndexRead: return "IndexRead"; + case AccessFlagBits2::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits2::eUniformRead: return "UniformRead"; + case AccessFlagBits2::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits2::eShaderRead: return "ShaderRead"; + case AccessFlagBits2::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits2::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits2::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits2::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits2::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits2::eTransferRead: return "TransferRead"; + case AccessFlagBits2::eTransferWrite: return "TransferWrite"; + case AccessFlagBits2::eHostRead: return "HostRead"; + case AccessFlagBits2::eHostWrite: return "HostWrite"; + case AccessFlagBits2::eMemoryRead: return "MemoryRead"; + case AccessFlagBits2::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits2::eShaderSampledRead: return "ShaderSampledRead"; + case AccessFlagBits2::eShaderStorageRead: return "ShaderStorageRead"; + case AccessFlagBits2::eShaderStorageWrite: return "ShaderStorageWrite"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case AccessFlagBits2::eVideoDecodeReadKHR: return "VideoDecodeReadKHR"; + case AccessFlagBits2::eVideoDecodeWriteKHR: return "VideoDecodeWriteKHR"; + case AccessFlagBits2::eVideoEncodeReadKHR: return "VideoEncodeReadKHR"; + case AccessFlagBits2::eVideoEncodeWriteKHR: return "VideoEncodeWriteKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case AccessFlagBits2::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits2::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits2::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits2::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits2::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; + case AccessFlagBits2::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; + case 
AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; + case AccessFlagBits2::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits2::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits2::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class SubmitFlagBits : VkSubmitFlags + { + eProtected = VK_SUBMIT_PROTECTED_BIT + }; + using SubmitFlagBitsKHR = SubmitFlagBits; + + VULKAN_HPP_INLINE std::string to_string( SubmitFlagBits value ) + { + switch ( value ) + { + case SubmitFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class RenderingFlagBits : VkRenderingFlags + { + eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, + eSuspending = VK_RENDERING_SUSPENDING_BIT, + eResuming = VK_RENDERING_RESUMING_BIT + }; + using RenderingFlagBitsKHR = RenderingFlagBits; + + VULKAN_HPP_INLINE std::string to_string( RenderingFlagBits value ) + { + switch ( value ) + { + case RenderingFlagBits::eContentsSecondaryCommandBuffers: return "ContentsSecondaryCommandBuffers"; + case RenderingFlagBits::eSuspending: return "Suspending"; + case RenderingFlagBits::eResuming: return "Resuming"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2 + { + eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT, + eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilter = + VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = + VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = + 
VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT, + eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT, + eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT, + eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT + }; + using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2; + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits2 value ) + { + switch ( value ) + { + case FormatFeatureFlagBits2::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits2::eStorageImage: return "StorageImage"; + case FormatFeatureFlagBits2::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits2::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits2::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits2::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits2::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits2::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits2::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic"; + case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits2::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; + case FormatFeatureFlagBits2::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter: + return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter: + return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit: + return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: + return 
"SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits2::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits2::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat"; + case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat"; + case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; + case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; + case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_surface === + + enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR + { + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, + eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) + { + switch ( value ) + { + case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90"; + case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PresentModeKHR + { + eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, + eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, + eFifo = VK_PRESENT_MODE_FIFO_KHR, + eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, + eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( 
PresentModeKHR value ) + { + switch ( value ) + { + case PresentModeKHR::eImmediate: return "Immediate"; + case PresentModeKHR::eMailbox: return "Mailbox"; + case PresentModeKHR::eFifo: return "Fifo"; + case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; + case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; + case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ColorSpaceKHR + { + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, + eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value ) + { + switch ( value ) + { + case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear"; + case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT"; + case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case 
CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_swapchain === + + enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) + { + switch ( value ) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR + { + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value ) + { + switch ( value ) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_display === + + enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR + { + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR ) + { + return "(void)"; + } + + enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + 
} +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT + { + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value ) + { + switch ( value ) + { + case DebugReportFlagBitsEXT::eInformation: return "Information"; + case DebugReportFlagBitsEXT::eWarning: return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError: return "Error"; + case DebugReportFlagBitsEXT::eDebug: return "Debug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DebugReportObjectTypeEXT + { + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKHR = 
VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT, + eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) + { + switch ( value ) + { + case DebugReportObjectTypeEXT::eUnknown: return "Unknown"; + case DebugReportObjectTypeEXT::eInstance: return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice: return "Device"; + case DebugReportObjectTypeEXT::eQueue: return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore"; + case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence: return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer: return "Buffer"; + case DebugReportObjectTypeEXT::eImage: return "Image"; + case DebugReportObjectTypeEXT::eEvent: return "Event"; + case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView: return "BufferView"; + case DebugReportObjectTypeEXT::eImageView: return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline: return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler: return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR"; + case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR"; + case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR"; + case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR"; + case 
DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX"; + case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX"; + case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_rasterization_order === + + enum class RasterizationOrderAMD + { + eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD, + eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value ) + { + switch ( value ) + { + case RasterizationOrderAMD::eStrict: return "Strict"; + case RasterizationOrderAMD::eRelaxed: return "Relaxed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR + { + eInvalid = VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR, +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT, + eEncodeH265EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT, + eDecodeH264EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT, + eDecodeH265EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodecOperationFlagBitsKHR::eInvalid: return "Invalid"; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT"; + case VideoCodecOperationFlagBitsKHR::eEncodeH265EXT: return "EncodeH265EXT"; + case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT"; + case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT"; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR + { + eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR, + eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR, + e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR, + e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR, + e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value ) + { + switch ( value ) + { + case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid"; + case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome"; + case VideoChromaSubsamplingFlagBitsKHR::e420: return "420"; + case VideoChromaSubsamplingFlagBitsKHR::e422: return "422"; + case VideoChromaSubsamplingFlagBitsKHR::e444: return "444"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR + { + eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR, + e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, + e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR, + e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value ) + { + switch ( value ) + { + case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid"; + case VideoComponentBitDepthFlagBitsKHR::e8: return "8"; + case VideoComponentBitDepthFlagBitsKHR::e10: return "10"; + case VideoComponentBitDepthFlagBitsKHR::e12: return "12"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR + { + eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR, + eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCapabilityFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + case VideoCapabilityFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR + { + eDefault = VK_VIDEO_SESSION_CREATE_DEFAULT_KHR, + eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value ) + { + switch ( value ) + { + case VideoSessionCreateFlagBitsKHR::eDefault: return "Default"; + case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR + { + eDefault = VK_VIDEO_CODING_CONTROL_DEFAULT_KHR, + eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingControlFlagBitsKHR::eDefault: return "Default"; + case VideoCodingControlFlagBitsKHR::eReset: return "Reset"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoCodingQualityPresetFlagBitsKHR : VkVideoCodingQualityPresetFlagsKHR + { + eNormal = VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR, + ePower = VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR, + eQuality = VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagBitsKHR value ) + { + switch ( value ) + { + case VideoCodingQualityPresetFlagBitsKHR::eNormal: return "Normal"; + case VideoCodingQualityPresetFlagBitsKHR::ePower: return "Power"; + case VideoCodingQualityPresetFlagBitsKHR::eQuality: return "Quality"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryResultStatusKHR + { + eError = VK_QUERY_RESULT_STATUS_ERROR_KHR, + eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR, + eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value ) + { + switch ( value ) + { + case QueryResultStatusKHR::eError: return "Error"; + case 
QueryResultStatusKHR::eNotReady: return "NotReady"; + case QueryResultStatusKHR::eComplete: return "Complete"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR ) + { + return "(void)"; + } + + enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR ) + { + return "(void)"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR + { + eDefault = VK_VIDEO_DECODE_DEFAULT_KHR, + eReserved0 = VK_VIDEO_DECODE_RESERVED_0_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoDecodeFlagBitsKHR::eDefault: return "Default"; + case VideoDecodeFlagBitsKHR::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkPipelineRasterizationStateStreamCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) + { + return "(void)"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + + enum class VideoEncodeH264CapabilityFlagBitsEXT : VkVideoEncodeH264CapabilityFlagsEXT + { + eCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT, + eCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT, + eWeightedBiPredImplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT, + eTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT, + eChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT, + eSecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT, + eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT, + eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT, + eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT, + eMultipleSlicePerFrame = VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT, + eEvenlyDistributedSliceSize = VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264CapabilityFlagBitsEXT::eCabac: return "Cabac"; + case VideoEncodeH264CapabilityFlagBitsEXT::eCavlc: return "Cavlc"; + case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBiPredImplicit: return "WeightedBiPredImplicit"; + case VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8: return "Transform8X8"; + case VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset: return "ChromaQpOffset"; + case VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset: return "SecondChromaQpOffset"; + case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled: return "DeblockingFilterDisabled"; + case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled: return "DeblockingFilterEnabled"; + case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial: return 
"DeblockingFilterPartial"; + case VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame: return "MultipleSlicePerFrame"; + case VideoEncodeH264CapabilityFlagBitsEXT::eEvenlyDistributedSliceSize: return "EvenlyDistributedSliceSize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT + { + eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT, + eSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT, + eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame"; + case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice"; + case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT + { + eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT, + eSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT, + eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame"; + case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice"; + case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264CreateFlagBitsEXT : VkVideoEncodeH264CreateFlagsEXT + { + eDefault = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT, + eReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264CreateFlagBitsEXT::eDefault: return "Default"; + case VideoEncodeH264CreateFlagBitsEXT::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH264RateControlStructureFlagBitsEXT : VkVideoEncodeH264RateControlStructureFlagsEXT + { + eUnknown = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT, + eFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT, + eDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown: return "Unknown"; + case VideoEncodeH264RateControlStructureFlagBitsEXT::eFlat: return "Flat"; + case VideoEncodeH264RateControlStructureFlagBitsEXT::eDyadic: return "Dyadic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h265 === + + enum class VideoEncodeH265InputModeFlagBitsEXT : VkVideoEncodeH265InputModeFlagsEXT + { + eFrame = VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT, + eSlice = VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_BIT_EXT, + eNonVcl = VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT + }; + + VULKAN_HPP_INLINE 
std::string to_string( VideoEncodeH265InputModeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH265InputModeFlagBitsEXT::eFrame: return "Frame"; + case VideoEncodeH265InputModeFlagBitsEXT::eSlice: return "Slice"; + case VideoEncodeH265InputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH265OutputModeFlagBitsEXT : VkVideoEncodeH265OutputModeFlagsEXT + { + eFrame = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT, + eSlice = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_BIT_EXT, + eNonVcl = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH265OutputModeFlagBitsEXT::eFrame: return "Frame"; + case VideoEncodeH265OutputModeFlagBitsEXT::eSlice: return "Slice"; + case VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH265CtbSizeFlagBitsEXT : VkVideoEncodeH265CtbSizeFlagsEXT + { + e8 = VK_VIDEO_ENCODE_H265_CTB_SIZE_8_BIT_EXT, + e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT, + e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT, + e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH265CtbSizeFlagBitsEXT::e8: return "8"; + case VideoEncodeH265CtbSizeFlagBitsEXT::e16: return "16"; + case VideoEncodeH265CtbSizeFlagBitsEXT::e32: return "32"; + case VideoEncodeH265CtbSizeFlagBitsEXT::e64: return "64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH265RateControlStructureFlagBitsEXT : VkVideoEncodeH265RateControlStructureFlagsEXT + { + eUnknown = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT, + eFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_BIT_EXT, + eDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlStructureFlagBitsEXT value ) + { + switch ( value ) + { + case VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown: return "Unknown"; + case VideoEncodeH265RateControlStructureFlagBitsEXT::eFlat: return "Flat"; + case VideoEncodeH265RateControlStructureFlagBitsEXT::eDyadic: return "Dyadic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeH265CapabilityFlagBitsEXT : VkVideoEncodeH265CapabilityFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsEXT ) + { + return "(void)"; + } + + enum class VideoEncodeH265CreateFlagBitsEXT : VkVideoEncodeH265CreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h264 === + + enum class VideoDecodeH264PictureLayoutFlagBitsEXT : VkVideoDecodeH264PictureLayoutFlagsEXT + { + eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_EXT, + eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_EXT, + eInterlacedSeparatePlanes = 
VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsEXT value ) + { + switch ( value ) + { + case VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive: return "Progressive"; + case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines: return "InterlacedInterleavedLines"; + case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoDecodeH264CreateFlagBitsEXT : VkVideoDecodeH264CreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_shader_info === + + enum class ShaderInfoTypeAMD + { + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value ) + { + switch ( value ) + { + case ShaderInfoTypeAMD::eStatistics: return "Statistics"; + case ShaderInfoTypeAMD::eBinary: return "Binary"; + case ShaderInfoTypeAMD::eDisassembly: return "Disassembly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP + { + }; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_validation_flags === + + enum class 
ValidationCheckEXT + { + eAll = VK_VALIDATION_CHECK_ALL_EXT, + eShaders = VK_VALIDATION_CHECK_SHADERS_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value ) + { + switch ( value ) + { + case ValidationCheckEXT::eAll: return "All"; + case ValidationCheckEXT::eShaders: return "Shaders"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN + { + }; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT + { + eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) + { + switch ( value ) + { + case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_display_surface_counter === + + enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT + { + eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value ) + { + switch ( value ) + { + case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_display_control === + + enum class DisplayPowerStateEXT + { + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) + { + switch ( value ) + { + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value ) + { + switch ( value ) + { + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value ) + { + switch ( value ) + { + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_viewport_swizzle === + + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + VULKAN_HPP_INLINE 
std::string to_string( ViewportCoordinateSwizzleNV value ) + { + switch ( value ) + { + case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY"; + case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW"; + case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkPipelineViewportSwizzleStateCreateFlagsNV + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_discard_rectangles === + + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value ) + { + switch ( value ) + { + case DiscardRectangleModeEXT::eInclusive: return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive: return "Exclusive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkPipelineDiscardRectangleStateCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_conservative_rasterization === + + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value ) + { + switch ( value ) + { + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class + PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_depth_clip_enable === + + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_performance_query === + + enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR + { + ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value ) + { + switch ( value ) + { + case 
PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting"; + case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceCounterScopeKHR + { + eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR, + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value ) + { + switch ( value ) + { + case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer"; + case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass"; + case PerformanceCounterScopeKHR::eCommand: return "Command"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceCounterStorageKHR + { + eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, + eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, + eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, + eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, + eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, + eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value ) + { + switch ( value ) + { + case PerformanceCounterStorageKHR::eInt32: return "Int32"; + case PerformanceCounterStorageKHR::eInt64: return "Int64"; + case PerformanceCounterStorageKHR::eUint32: return "Uint32"; + case PerformanceCounterStorageKHR::eUint64: return "Uint64"; + case PerformanceCounterStorageKHR::eFloat32: return "Float32"; + case PerformanceCounterStorageKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceCounterUnitKHR + { + eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, + ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, + eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, + eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, + eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, + eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, + eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, + eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, + eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, + eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, + eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value ) + { + switch ( value ) + { + case PerformanceCounterUnitKHR::eGeneric: return "Generic"; + case PerformanceCounterUnitKHR::ePercentage: return "Percentage"; + case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds"; + case PerformanceCounterUnitKHR::eBytes: return "Bytes"; + case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond"; + case PerformanceCounterUnitKHR::eKelvin: return "Kelvin"; + case PerformanceCounterUnitKHR::eWatts: return "Watts"; + case PerformanceCounterUnitKHR::eVolts: return "Volts"; + case PerformanceCounterUnitKHR::eAmps: return "Amps"; + case PerformanceCounterUnitKHR::eHertz: return "Hertz"; + case PerformanceCounterUnitKHR::eCycles: return "Cycles"; 
+ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR + { + }; + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR ) + { + return "(void)"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK + { + }; + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK + { + }; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT + { + eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT + { + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) + { + return "(void)"; + } + + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_blend_operation_advanced === + + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) + { + switch ( value ) + { + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== 
VK_NV_fragment_coverage_to_color === + + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkPipelineCoverageToColorStateCreateFlagsNV + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_KHR_acceleration_structure === + + enum class AccelerationStructureTypeKHR + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR + }; + using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel"; + case AccelerationStructureTypeKHR::eGeneric: return "Generic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureBuildTypeKHR + { + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureBuildTypeKHR::eHost: return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice: return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR + { + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR + }; + using GeometryFlagBitsNV = GeometryFlagBitsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryFlagBitsKHR::eOpaque: return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR + { + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, + eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV + }; + using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing"; + case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( 
value ) ) + " )"; + } + } + + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR + { + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV + }; + using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory"; + case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CopyAccelerationStructureModeKHR + { + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR, + eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR + }; + using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; + + VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case CopyAccelerationStructureModeKHR::eClone: return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact: return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class GeometryTypeKHR + { + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR + }; + using GeometryTypeNV = GeometryTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value ) + { + switch ( value ) + { + case GeometryTypeKHR::eTriangles: return "Triangles"; + case GeometryTypeKHR::eAabbs: return "Aabbs"; + case GeometryTypeKHR::eInstances: return "Instances"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureCompatibilityKHR + { + eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR, + eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value ) + { + switch ( value ) + { + case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible"; + case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR + { + 
eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, + eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value ) + { + switch ( value ) + { + case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + case AccelerationStructureCreateFlagBitsKHR::eMotionNV: return "MotionNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class BuildAccelerationStructureModeKHR + { + eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR, + eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureModeKHR::eBuild: return "Build"; + case BuildAccelerationStructureModeKHR::eUpdate: return "Update"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_framebuffer_mixed_samples === + + enum class CoverageModulationModeNV + { + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) + { + switch ( value ) + { + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkPipelineCoverageModulationStateCreateFlagsNV + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_validation_cache === + + enum class ValidationCacheHeaderVersionEXT + { + eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value ) + { + switch ( value ) + { + case ValidationCacheHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ValidationCacheCreateFlagBitsEXT : VkValidationCacheCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_NV_shading_rate_image === + + enum class ShadingRatePaletteEntryNV + { + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, + 
e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value ) + { + switch ( value ) + { + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) + { + switch ( value ) + { + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing === + + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object"; + case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_pipeline_compiler_control === + + enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_EXT_calibrated_timestamps === + + enum class TimeDomainEXT + { + eDevice = VK_TIME_DOMAIN_DEVICE_EXT, + eClockMonotonic = 
VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, + eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value ) + { + switch ( value ) + { + case TimeDomainEXT::eDevice: return "Device"; + case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h265 === + + enum class VideoDecodeH265CreateFlagBitsEXT : VkVideoDecodeH265CreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_global_priority === + + enum class QueueGlobalPriorityKHR + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR + }; + using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR; + + VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value ) + { + switch ( value ) + { + case QueueGlobalPriorityKHR::eLow: return "Low"; + case QueueGlobalPriorityKHR::eMedium: return "Medium"; + case QueueGlobalPriorityKHR::eHigh: return "High"; + case QueueGlobalPriorityKHR::eRealtime: return "Realtime"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_memory_overallocation_behavior === + + enum class MemoryOverallocationBehaviorAMD + { + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) + { + switch ( value ) + { + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_INTEL_performance_query === + + enum class PerformanceConfigurationTypeINTEL + { + eCommandQueueMetricsDiscoveryActivated = + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) + { + switch ( value ) + { + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: + return "CommandQueueMetricsDiscoveryActivated"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class QueryPoolSamplingModeINTEL + { + eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value ) + { + switch ( value ) + { + case QueryPoolSamplingModeINTEL::eManual: return "Manual"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceOverrideTypeINTEL + { + eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, + eFlushGpuCaches = 
VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value ) + { + switch ( value ) + { + case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware"; + case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceParameterTypeINTEL + { + eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, + eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value ) + { + switch ( value ) + { + case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported"; + case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PerformanceValueTypeINTEL + { + eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, + eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, + eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, + eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, + eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value ) + { + switch ( value ) + { + case PerformanceValueTypeINTEL::eUint32: return "Uint32"; + case PerformanceValueTypeINTEL::eUint64: return "Uint64"; + case PerformanceValueTypeINTEL::eFloat: return "Float"; + case PerformanceValueTypeINTEL::eBool: return "Bool"; + case PerformanceValueTypeINTEL::eString: return "String"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA + { + }; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + enum class FragmentShadingRateCombinerOpKHR + { + eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, + eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR, + eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR, + eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR, + eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value ) + { + switch ( value ) + { + case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep"; + case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace"; + case FragmentShadingRateCombinerOpKHR::eMin: return "Min"; + case FragmentShadingRateCombinerOpKHR::eMax: return "Max"; + case FragmentShadingRateCombinerOpKHR::eMul: return "Mul"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_shader_core_properties2 === + + enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD + { + }; + 
+ VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_EXT_validation_features === + + enum class ValidationFeatureEnableEXT + { + eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, + eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT, + eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) + { + switch ( value ) + { + case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted"; + case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot"; + case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf"; + case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ValidationFeatureDisableEXT + { + eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, + eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, + eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, + eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, + eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, + eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, + eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, + eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value ) + { + switch ( value ) + { + case ValidationFeatureDisableEXT::eAll: return "All"; + case ValidationFeatureDisableEXT::eShaders: return "Shaders"; + case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety"; + case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters"; + case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes"; + case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks"; + case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles"; + case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_cooperative_matrix === + + enum class ScopeNV + { + eDevice = VK_SCOPE_DEVICE_NV, + eWorkgroup = VK_SCOPE_WORKGROUP_NV, + eSubgroup = VK_SCOPE_SUBGROUP_NV, + eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ScopeNV value ) + { + switch ( value ) + { + case ScopeNV::eDevice: return "Device"; + case ScopeNV::eWorkgroup: return "Workgroup"; + case ScopeNV::eSubgroup: return "Subgroup"; + case ScopeNV::eQueueFamily: return "QueueFamily"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ComponentTypeNV + { + eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, + eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, + eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, + eSint8 = VK_COMPONENT_TYPE_SINT8_NV, + eSint16 = VK_COMPONENT_TYPE_SINT16_NV, + eSint32 = VK_COMPONENT_TYPE_SINT32_NV, + eSint64 = VK_COMPONENT_TYPE_SINT64_NV, + eUint8 = 
VK_COMPONENT_TYPE_UINT8_NV, + eUint16 = VK_COMPONENT_TYPE_UINT16_NV, + eUint32 = VK_COMPONENT_TYPE_UINT32_NV, + eUint64 = VK_COMPONENT_TYPE_UINT64_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) + { + switch ( value ) + { + case ComponentTypeNV::eFloat16: return "Float16"; + case ComponentTypeNV::eFloat32: return "Float32"; + case ComponentTypeNV::eFloat64: return "Float64"; + case ComponentTypeNV::eSint8: return "Sint8"; + case ComponentTypeNV::eSint16: return "Sint16"; + case ComponentTypeNV::eSint32: return "Sint32"; + case ComponentTypeNV::eSint64: return "Sint64"; + case ComponentTypeNV::eUint8: return "Uint8"; + case ComponentTypeNV::eUint16: return "Uint16"; + case ComponentTypeNV::eUint32: return "Uint32"; + case ComponentTypeNV::eUint64: return "Uint64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_coverage_reduction_mode === + + enum class CoverageReductionModeNV + { + eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, + eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) + { + switch ( value ) + { + case CoverageReductionModeNV::eMerge: return "Merge"; + case CoverageReductionModeNV::eTruncate: return "Truncate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkPipelineCoverageReductionStateCreateFlagsNV + { + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_provoking_vertex === + + enum class ProvokingVertexModeEXT + { + eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT, + eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value ) + { + switch ( value ) + { + case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex"; + case ProvokingVertexModeEXT::eLastVertex: return "LastVertex"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + enum class FullScreenExclusiveEXT + { + eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, + eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, + eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, + eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value ) + { + switch ( value ) + { + case FullScreenExclusiveEXT::eDefault: return "Default"; + case FullScreenExclusiveEXT::eAllowed: return "Allowed"; + case FullScreenExclusiveEXT::eDisallowed: return "Disallowed"; + case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + enum class HeadlessSurfaceCreateFlagBitsEXT : VkHeadlessSurfaceCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_line_rasterization === + + enum class LineRasterizationModeEXT + { + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, + eBresenham = 
VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value ) + { + switch ( value ) + { + case LineRasterizationModeEXT::eDefault: return "Default"; + case LineRasterizationModeEXT::eRectangular: return "Rectangular"; + case LineRasterizationModeEXT::eBresenham: return "Bresenham"; + case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_pipeline_executable_properties === + + enum class PipelineExecutableStatisticFormatKHR + { + eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, + eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, + eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, + eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) + { + switch ( value ) + { + case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_device_generated_commands === + + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + { + eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value ) + { + switch ( value ) + { + case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class IndirectCommandsTokenTypeNV + { + eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup"; + case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; + case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV + { + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + 
eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_memory_report === + + enum class DeviceMemoryReportEventTypeEXT + { + eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT, + eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT, + eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT, + eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT, + eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value ) + { + switch ( value ) + { + case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate"; + case DeviceMemoryReportEventTypeEXT::eFree: return "Free"; + case DeviceMemoryReportEventTypeEXT::eImport: return "Import"; + case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport"; + case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class DeviceMemoryReportFlagBitsEXT : VkDeviceMemoryReportFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_pipeline_creation_cache_control === + + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + { + eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_DEFAULT_KHR, + eReserved0 = VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeFlagBitsKHR::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR + { + eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR, + eReserved0 = VK_VIDEO_ENCODE_RATE_CONTROL_RESERVED_0_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeRateControlFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeRateControlFlagBitsKHR::eReserved0: return "Reserved0"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class 
VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR + { + eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR, + eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR, + eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None"; + case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr"; + case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + + enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV + { + eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) + { + switch ( value ) + { + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_shading_rate_enums === + + enum class FragmentShadingRateNV + { + e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV, + e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, + e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV, + eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value ) + { + switch ( value ) + { + case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case 
FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case FragmentShadingRateNV::eNoInvocations: return "NoInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class FragmentShadingRateTypeNV + { + eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV, + eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value ) + { + switch ( value ) + { + case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize"; + case FragmentShadingRateTypeNV::eEnums: return "Enums"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_motion_blur === + + enum class AccelerationStructureMotionInstanceTypeNV + { + eStatic = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV, + eMatrixMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV, + eSrtMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMotionInstanceTypeNV::eStatic: return "Static"; + case AccelerationStructureMotionInstanceTypeNV::eMatrixMotion: return "MatrixMotion"; + case AccelerationStructureMotionInstanceTypeNV::eSrtMotion: return "SrtMotion"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class AccelerationStructureMotionInfoFlagBitsNV : VkAccelerationStructureMotionInfoFlagsNV + { + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagBitsNV ) + { + return "(void)"; + } + + enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV + { + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_ARM_rasterization_order_attachment_access === + + enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags + { + eRasterizationOrderAttachmentAccessARM = + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessARM: + return "RasterizationOrderAttachmentAccessARM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags + { + eRasterizationOrderAttachmentDepthAccessARM = + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessARM = + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessARM: + return "RasterizationOrderAttachmentDepthAccessARM"; + case 
PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessARM: + return "RasterizationOrderAttachmentStencilAccessARM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT + { + }; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + + enum class RayTracingShaderGroupTypeKHR + { + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR + }; + using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; + + VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) + { + switch ( value ) + { + case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ShaderGroupShaderKHR + { + eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR, + eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR, + eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR, + eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value ) + { + switch ( value ) + { + case ShaderGroupShaderKHR::eGeneral: return "General"; + case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit"; + case ShaderGroupShaderKHR::eAnyHit: return "AnyHit"; + case ShaderGroupShaderKHR::eIntersection: return "Intersection"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + enum class ImageConstraintsInfoFlagBitsFUCHSIA : VkImageConstraintsInfoFlagsFUCHSIA + { + eCpuReadRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA, + eCpuReadOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA, + eCpuWriteRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA, + eCpuWriteOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA, + eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA + }; + + VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagBitsFUCHSIA value ) + { + switch ( value ) + { + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely: return "CpuReadRarely"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften: return "CpuReadOften"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely: return "CpuWriteRarely"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften: return "CpuWriteOften"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional: return "ProtectedOptional"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA + { + }; + + VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagBitsFUCHSIA ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ 
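(A minimal usage sketch for reviewers, not part of the vendored header itself: the to_string overloads above and the format-trait helpers that follow further down are free functions in VULKAN_HPP_NAMESPACE, which aliases to vk by default. Assuming the usual umbrella include <vulkan/vulkan.hpp> pulls this file in, they can be exercised like this:)

    #include <iostream>
    #include <vulkan/vulkan.hpp>  // assumed umbrella include for the vendored headers

    int main()
    {
      // Translate a strongly typed enum value back into its Vulkan spec name; prints "Warning".
      std::cout << vk::to_string( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) << '\n';

      // Query the texel-block size in bytes of a format via the format traits below; prints "4".
      std::cout << unsigned( vk::blockSize( vk::Format::eR8G8B8A8Unorm ) ) << '\n';
      return 0;
    }
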
+ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX + { + }; + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template + struct cpp_type + {}; + + //===================== + //=== Format Traits === + //===================== + + // The texel block size in bytes. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8; + case 
VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + + // The number of texels in a texel block. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + + // The three-dimensional extent of a texel block. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { 5, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { 5, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { 5, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { 5, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { 6, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { 6, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { 6, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { 6, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { 8, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { 8, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { 8, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { 8, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { 8, 8, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { 8, 8, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { 10, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { 10, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
return { 10, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { 10, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { 10, 8, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { 10, 8, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { 10, 10, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { 10, 10, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { 12, 10, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { 12, 10, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { 12, 12, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { 12, 12, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { 2, 1, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { 5, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { 5, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { 6, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { 6, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { 8, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { 8, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { 8, 8, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { 10, 5, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { 10, 6, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { 10, 8, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { 10, 10, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { 12, 10, 1 }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { 12, 12, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { 8, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { 8, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { 8, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { 4, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { 8, 4, 1 }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return { 4, 4, 1 }; + + default: return { 1, 1, 1 }; + } + } + + // A textual description of the compression scheme, or an empty string if it is not compressed + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC"; + 
case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC 
LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC"; + + default: return ""; + } + } + + // True, if this format is a compressed one. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 ); + } + + // The number of bits into which the format is packed. A single image element in this format + // can be stored in the same space as a scalar type of this bit width. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; + + default: return 0; + } + } + + // The number of components of this format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; + case 
VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; + + default: return 0; + } + } + + // True, if the components of this format are compressed, otherwise false. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + case 
VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true; + default: return false; + } + } + + // The number of bits in this component, if not compressed, otherwise 0. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, + uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 6; + case 2: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 6; + case 2: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 5; + case 2: return 5; + case 3: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 5; + case 2: return 5; + case 3: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + 
} + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; 
+ case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return 8; + case 1: 
return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return 
2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: 
return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 
0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return 10; + case 1: return 11; + case 2: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return 9; + case 1: return 9; + case 2: return 9; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( 
component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return 24; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return 24; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return 11; + case 1: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return 11; + case 1: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + + default: return 0; + } + } + + // The plane this component lies in. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, + uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + + default: return 0; + } + } + + // The number of image planes of this format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2; + case 
VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2; + + default: return 1; + } + } + + // The single-plane format that this plane is compatible with. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format + planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return 
VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; 
+ } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return format; + } + } + + // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, + uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return 1; + } + } + + // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, + uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + 
default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return 1; + } + } + + template + struct IndexTypeValue + {}; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; + }; + + template <> + struct CppType + { + using Type = uint16_t; + }; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; + }; + + template <> + struct CppType + { + using Type = uint32_t; + }; + + template <> + struct IndexTypeValue + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT; + }; + + template <> + struct CppType + { + using Type = uint8_t; + }; + + //================ + //=== BITMASKs === + //================ + + //=== VK_VERSION_1_0 === + + 
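Taken together, planeCompatibleFormat(), planeWidthDivisor() and planeHeightDivisor() describe how each plane of a multi-planar format is laid out. A minimal usage sketch follows (illustrative only, not part of the generated header; it assumes the default vk namespace alias, a C++14-or-later compiler so the VULKAN_HPP_CONSTEXPR_14 functions are usable at compile time, and an arbitrary 1920x1080 image):

    #include <vulkan/vulkan.hpp>
    #include <cstdint>

    int main()
    {
      // NV12-style 4:2:0 two-plane format: plane 0 is luma, plane 1 holds the
      // interleaved Cb/Cr samples at half the width and half the height.
      constexpr vk::Format fmt    = vk::Format::eG8B8R82Plane420Unorm;
      constexpr uint32_t   width  = 1920;
      constexpr uint32_t   height = 1080;

      constexpr vk::Format chromaFormat = vk::planeCompatibleFormat( fmt, 1 );        // eR8G8Unorm
      constexpr uint32_t   chromaWidth  = width / vk::planeWidthDivisor( fmt, 1 );    // 1920 / 2 = 960
      constexpr uint32_t   chromaHeight = height / vk::planeHeightDivisor( fmt, 1 );  // 1080 / 2 = 540

      static_assert( chromaFormat == vk::Format::eR8G8Unorm, "plane 1 of G8_B8R8_2PLANE_420 is R8G8" );
      static_assert( chromaWidth == 960 && chromaHeight == 540, "chroma plane is subsampled 2x in each dimension" );
      return 0;
    }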
using FormatFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) | + VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) | + VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) | + VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | VkFlags( FormatFeatureFlagBits::eVertexBuffer ) | + VkFlags( FormatFeatureFlagBits::eColorAttachment ) | VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) | + VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | VkFlags( FormatFeatureFlagBits::eBlitSrc ) | + VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) | + VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) | + VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) | + VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | + VkFlags( FormatFeatureFlagBits::eDisjoint ) | VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) | + VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) | + VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( FormatFeatureFlagBits::eVideoDecodeOutputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoDecodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) | + VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) | + VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( FormatFeatureFlagBits::eVideoEncodeInputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoEncodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, + FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FormatFeatureFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FormatFeatureFlagBits::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) + result += 
"StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits::eBlitDst ) + result += "BlitDst | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) + result += "SampledImageFilterCubicIMG | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) + result += "VideoDecodeOutputKHR | "; + if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) + result += "VideoEncodeInputKHR | "; + if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ImageCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) | + VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) | + VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) | + VkFlags( 
ImageCreateFlagBits::eSplitInstanceBindRegions ) | VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) | + VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | VkFlags( ImageCreateFlagBits::eExtendedUsage ) | + VkFlags( ImageCreateFlagBits::eProtected ) | VkFlags( ImageCreateFlagBits::eDisjoint ) | + VkFlags( ImageCreateFlagBits::eCornerSampledNV ) | + VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) | + VkFlags( ImageCreateFlagBits::eSubsampledEXT ) | VkFlags( ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, + ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & ImageCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & ImageCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & ImageCreateFlagBits::eMutableFormat ) + result += "MutableFormat | "; + if ( value & ImageCreateFlagBits::eCubeCompatible ) + result += "CubeCompatible | "; + if ( value & ImageCreateFlagBits::eAlias ) + result += "Alias | "; + if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & ImageCreateFlagBits::e2DArrayCompatible ) + result += "2DArrayCompatible | "; + if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) + result += "BlockTexelViewCompatible | "; + if ( value & ImageCreateFlagBits::eExtendedUsage ) + result += "ExtendedUsage | "; + if ( value & ImageCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & ImageCreateFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & ImageCreateFlagBits::eCornerSampledNV ) + result += "CornerSampledNV | "; + if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) + result += "SampleLocationsCompatibleDepthEXT | "; + if ( value & ImageCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM ) + result += "FragmentDensityMapOffsetQCOM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ImageUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) | + VkFlags( ImageUsageFlagBits::eSampled ) | VkFlags( ImageUsageFlagBits::eStorage ) | + VkFlags( ImageUsageFlagBits::eColorAttachment ) | + VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) | + VkFlags( ImageUsageFlagBits::eTransientAttachment ) | VkFlags( ImageUsageFlagBits::eInputAttachment ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( 
ImageUsageFlagBits::eVideoDecodeDstKHR ) | + VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) | + VkFlags( ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) | + VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( ImageUsageFlagBits::eInvocationMaskHUAWEI ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, + ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & ImageUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & ImageUsageFlagBits::eSampled ) + result += "Sampled | "; + if ( value & ImageUsageFlagBits::eStorage ) + result += "Storage | "; + if ( value & ImageUsageFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & ImageUsageFlagBits::eTransientAttachment ) + result += "TransientAttachment | "; + if ( value & ImageUsageFlagBits::eInputAttachment ) + result += "InputAttachment | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; + if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; + if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; + if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; + if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI ) + result += "InvocationMaskHUAWEI | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using InstanceCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) + { + return "{}"; + } + + using MemoryHeapFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | 
VkFlags( MemoryHeapFlagBits::eMultiInstance ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, + MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( MemoryHeapFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryHeapFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryHeapFlagBits::eMultiInstance ) + result += "MultiInstance | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using MemoryPropertyFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) | + VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) | + VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) | + VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) | + VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD ) | + VkFlags( MemoryPropertyFlagBits::eRdmaCapableNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags + operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( MemoryPropertyFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryPropertyFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryPropertyFlagBits::eHostVisible ) + result += "HostVisible | "; + if ( value & MemoryPropertyFlagBits::eHostCoherent ) + result += "HostCoherent | "; + if ( value & MemoryPropertyFlagBits::eHostCached ) + result += "HostCached | "; + if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) + result += "LazilyAllocated | "; + if ( value & MemoryPropertyFlagBits::eProtected ) + result += "Protected | "; + if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) + result += "DeviceCoherentAMD | "; + if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) + result += "DeviceUncachedAMD | "; + if ( value & MemoryPropertyFlagBits::eRdmaCapableNV ) + result += "RdmaCapableNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using 
QueueFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) | + VkFlags( QueueFlagBits::eTransfer ) | VkFlags( QueueFlagBits::eSparseBinding ) | + VkFlags( QueueFlagBits::eProtected ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, + QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueueFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueueFlagBits::eGraphics ) + result += "Graphics | "; + if ( value & QueueFlagBits::eCompute ) + result += "Compute | "; + if ( value & QueueFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & QueueFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & QueueFlagBits::eProtected ) + result += "Protected | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & QueueFlagBits::eVideoDecodeKHR ) + result += "VideoDecodeKHR | "; + if ( value & QueueFlagBits::eVideoEncodeKHR ) + result += "VideoEncodeKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SampleCountFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) | + VkFlags( SampleCountFlagBits::e4 ) | VkFlags( SampleCountFlagBits::e8 ) | + VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) | + VkFlags( SampleCountFlagBits::e64 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, + SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SampleCountFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SampleCountFlagBits::e1 ) + result += "1 | "; + if ( value & SampleCountFlagBits::e2 ) + result += "2 | "; + if ( value & SampleCountFlagBits::e4 ) + result += "4 | "; + if ( value & SampleCountFlagBits::e8 ) + result += "8 | "; + if ( value & SampleCountFlagBits::e16 ) + result += "16 | "; + if ( value & SampleCountFlagBits::e32 ) + result += 
"32 | "; + if ( value & SampleCountFlagBits::e64 ) + result += "64 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DeviceCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) + { + return "{}"; + } + + using DeviceQueueCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceQueueCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceQueueCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags + operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceQueueCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DeviceQueueCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceQueueCreateFlagBits::eProtected ) + result += "Protected | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PipelineStageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) | + VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) | + VkFlags( PipelineStageFlagBits::eTessellationControlShader ) | + VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) | + VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) | + VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) | + VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) | + VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) | + VkFlags( PipelineStageFlagBits::eHost ) | VkFlags( PipelineStageFlagBits::eAllGraphics ) | + VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eNone ) | + VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) | + VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) | + VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) | + VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | VkFlags( PipelineStageFlagBits::eTaskShaderNV ) | + VkFlags( PipelineStageFlagBits::eMeshShaderNV ) | VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) | + VkFlags( PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) | + VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + 
return PipelineStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, + PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineStageFlagBits::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & PipelineStageFlagBits::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) + result += "AccelerationStructureBuildKHR | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) + result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eTaskShaderNV ) + result += "TaskShaderNV | "; + if ( value & PipelineStageFlagBits::eMeshShaderNV ) + result += "MeshShaderNV | "; + if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using MemoryMapFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + { + return "{}"; + } + + using ImageAspectFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) | + VkFlags( ImageAspectFlagBits::eStencil ) | VkFlags( ImageAspectFlagBits::eMetadata ) | + VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) | 
+ VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) | + VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) | + VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT ) | VkFlags( ImageAspectFlagBits::eNoneKHR ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageAspectFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageAspectFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, + ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageAspectFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageAspectFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageAspectFlagBits::eColor ) + result += "Color | "; + if ( value & ImageAspectFlagBits::eDepth ) + result += "Depth | "; + if ( value & ImageAspectFlagBits::eStencil ) + result += "Stencil | "; + if ( value & ImageAspectFlagBits::eMetadata ) + result += "Metadata | "; + if ( value & ImageAspectFlagBits::ePlane0 ) + result += "Plane0 | "; + if ( value & ImageAspectFlagBits::ePlane1 ) + result += "Plane1 | "; + if ( value & ImageAspectFlagBits::ePlane2 ) + result += "Plane2 | "; + if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) + result += "MemoryPlane0EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) + result += "MemoryPlane1EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) + result += "MemoryPlane2EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) + result += "MemoryPlane3EXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SparseImageFormatFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) | + VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) | + VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags + operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SparseImageFormatFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SparseImageFormatFlagBits::eSingleMiptail ) + result += "SingleMiptail | "; + if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) + result += 
"AlignedMipSize | "; + if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) + result += "NonstandardBlockSize | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SparseMemoryBindFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags + operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SparseMemoryBindFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SparseMemoryBindFlagBits::eMetadata ) + result += "Metadata | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FenceCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FenceCreateFlagBits::eSignaled ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, + FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FenceCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FenceCreateFlagBits::eSignaled ) + result += "Signaled | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SemaphoreCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) + { + return "{}"; + } + + using EventCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( EventCreateFlagBits::eDeviceOnly ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0, + EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return EventCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&( EventCreateFlagBits bit0, + EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return EventCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0, + EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return EventCreateFlags( bit0 ) ^ bit1; + } + + 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( EventCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & EventCreateFlagBits::eDeviceOnly ) + result += "DeviceOnly | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueryPipelineStatisticFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) | + VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) | + VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) | + VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) | + VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) | + VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) | + VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags + operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryPipelineStatisticFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags + operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryPipelineStatisticFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags + operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryPipelineStatisticFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( QueryPipelineStatisticFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) + result += "InputAssemblyVertices | "; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) + result += "InputAssemblyPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) + result += "VertexShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) + result += "GeometryShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) + result += "GeometryShaderPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) + result += "ClippingInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) + result += "ClippingPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) + result += "FragmentShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) + result += "TessellationControlShaderPatches | "; + if ( value & 
QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) + result += "TessellationEvaluationShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) + result += "ComputeShaderInvocations | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueryPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) + { + return "{}"; + } + + using QueryResultFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) | + VkFlags( QueryResultFlagBits::eWithAvailability ) | VkFlags( QueryResultFlagBits::ePartial ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( QueryResultFlagBits::eWithStatusKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, + QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryResultFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryResultFlagBits::e64 ) + result += "64 | "; + if ( value & QueryResultFlagBits::eWait ) + result += "Wait | "; + if ( value & QueryResultFlagBits::eWithAvailability ) + result += "WithAvailability | "; + if ( value & QueryResultFlagBits::ePartial ) + result += "Partial | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & QueryResultFlagBits::eWithStatusKHR ) + result += "WithStatusKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) | + VkFlags( BufferCreateFlagBits::eSparseAliased ) | VkFlags( BufferCreateFlagBits::eProtected ) | + VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, + BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) + { + if ( !value ) + return "{}"; + + 
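Each of these bitmask blocks follows the same generated pattern: a Flags alias over the FlagBits enum, a FlagTraits specialization listing every valid bit, the bitwise operators, and a to_string() overload. A short sketch of how the resulting types are used (illustrative only; the flag choices, namespace alias vk, and printed text are assumptions, with the exact to_string output depending on which bits are set):

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
      // FlagBits values combine into the corresponding Flags type via the
      // operator| overloads defined in these blocks.
      vk::ImageUsageFlags usage =
          vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled;

      // Testing a bit uses operator& together with the explicit bool conversion of Flags.
      if ( usage & vk::ImageUsageFlagBits::eSampled )
        std::cout << "sampled usage requested\n";

      // to_string() names the set bits, e.g. "{ Sampled | ColorAttachment }".
      std::cout << vk::to_string( usage ) << '\n';

      // operator~ is masked by the FlagTraits allFlags value, so the complement
      // only ever contains bits that are valid for this flag type.
      vk::ImageUsageFlags everythingButSampled = ~vk::ImageUsageFlagBits::eSampled;
      std::cout << vk::to_string( everythingButSampled ) << '\n';
      return 0;
    }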
std::string result; + if ( value & BufferCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & BufferCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & BufferCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & BufferCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) | + VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) | + VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) | + VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) | + VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) | + VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) | + VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) | + VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) | + VkFlags( BufferUsageFlagBits::eAccelerationStructureStorageKHR ) | + VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, + BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BufferUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & 
BufferUsageFlagBits::eIndirectBuffer ) + result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; + if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) + result += "AccelerationStructureBuildInputReadOnlyKHR | "; + if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) + result += "AccelerationStructureStorageKHR | "; + if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) + result += "ShaderBindingTableKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; + if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BufferViewCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) + { + return "{}"; + } + + using ImageViewCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) | + VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags + operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ImageViewCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) + result += "FragmentDensityMapDynamicEXT | "; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) + result += "FragmentDensityMapDeferredEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ShaderModuleCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) + { + return "{}"; + } + + using PipelineCacheCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronized ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator|( 
PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags + operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCacheCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCacheCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronized ) + result += "ExternallySynchronized | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ColorComponentFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) | + VkFlags( ColorComponentFlagBits::eB ) | VkFlags( ColorComponentFlagBits::eA ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags + operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ColorComponentFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ColorComponentFlagBits::eR ) + result += "R | "; + if ( value & ColorComponentFlagBits::eG ) + result += "G | "; + if ( value & ColorComponentFlagBits::eB ) + result += "B | "; + if ( value & ColorComponentFlagBits::eA ) + result += "A | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CullModeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) | + VkFlags( CullModeFlagBits::eBack ) | VkFlags( CullModeFlagBits::eFrontAndBack ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CullModeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CullModeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, + CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CullModeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( 
CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( CullModeFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CullModeFlagBits::eFront ) + result += "Front | "; + if ( value & CullModeFlagBits::eBack ) + result += "Back | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PipelineColorBlendStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessARM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator|( + PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineColorBlendStateCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator&( + PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineColorBlendStateCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator^( + PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineColorBlendStateCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags + operator~( PipelineColorBlendStateCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineColorBlendStateCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessARM ) + result += "RasterizationOrderAttachmentAccessARM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PipelineCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) | + VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) | + VkFlags( PipelineCreateFlagBits::eDispatchBase ) | + VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) | + VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailure ) | + VkFlags( PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) | + VkFlags( PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) | + VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) | + VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) | + VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | + VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | + VkFlags( 
PipelineCreateFlagBits::eRayTracingAllowMotionNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags + operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreateFlagBits::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) + result += "FailOnPipelineCompileRequired | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) + result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) + result += "RenderingFragmentShadingRateAttachmentKHR | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingAllowMotionNV ) + result += "RayTracingAllowMotionNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using 
PipelineDepthStencilStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessARM ) | + VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessARM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator|( + PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator&( + PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineDepthStencilStateCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator^( + PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineDepthStencilStateCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags + operator~( PipelineDepthStencilStateCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineDepthStencilStateCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessARM ) + result += "RasterizationOrderAttachmentDepthAccessARM | "; + if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessARM ) + result += "RasterizationOrderAttachmentStencilAccessARM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PipelineDynamicStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + { + return "{}"; + } + + using PipelineInputAssemblyStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + { + return "{}"; + } + + using PipelineLayoutCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) + { + return "{}"; + } + + using PipelineMultisampleStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) + { + return "{}"; + } + + using PipelineRasterizationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) + { + return "{}"; + } + + using PipelineShaderStageCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) | + VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroups ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
PipelineShaderStageCreateFlags + operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags + operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineShaderStageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) + result += "AllowVaryingSubgroupSize | "; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroups ) + result += "RequireFullSubgroups | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PipelineTessellationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) + { + return "{}"; + } + + using PipelineVertexInputStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) + { + return "{}"; + } + + using PipelineViewportStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) + { + return "{}"; + } + + using ShaderStageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) | + VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) | + VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) | + VkFlags( ShaderStageFlagBits::eAllGraphics ) | VkFlags( ShaderStageFlagBits::eAll ) | + VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) | + VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) | + VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | VkFlags( ShaderStageFlagBits::eCallableKHR ) | + VkFlags( ShaderStageFlagBits::eTaskNV ) | VkFlags( ShaderStageFlagBits::eMeshNV ) | + VkFlags( ShaderStageFlagBits::eSubpassShadingHUAWEI ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, + ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ShaderStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ShaderStageFlagBits::eVertex ) + result += "Vertex | "; + if ( value & ShaderStageFlagBits::eTessellationControl ) + result += "TessellationControl | "; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) + result += "TessellationEvaluation | "; + if ( value & ShaderStageFlagBits::eGeometry ) + result += "Geometry | "; + if ( value & ShaderStageFlagBits::eFragment ) + result 
+= "Fragment | "; + if ( value & ShaderStageFlagBits::eCompute ) + result += "Compute | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) + result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) + result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) + result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) + result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) + result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) + result += "CallableKHR | "; + if ( value & ShaderStageFlagBits::eTaskNV ) + result += "TaskNV | "; + if ( value & ShaderStageFlagBits::eMeshNV ) + result += "MeshNV | "; + if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI ) + result += "SubpassShadingHUAWEI | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SamplerCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) | + VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, + SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SamplerCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + result += "SubsampledCoarseReconstructionEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DescriptorPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) | + VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags + operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorPoolCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorPoolCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE 
std::string to_string( DescriptorPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) + result += "FreeDescriptorSet | "; + if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) + result += "HostOnlyVALVE | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DescriptorPoolResetFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) + { + return "{}"; + } + + using DescriptorSetLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) | + VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags + operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorSetLayoutCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) + result += "UpdateAfterBindPool | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) + result += "PushDescriptorKHR | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) + result += "HostOnlyPoolVALVE | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccessFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) | + VkFlags( AccessFlagBits::eVertexAttributeRead ) | VkFlags( AccessFlagBits::eUniformRead ) | + VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) | + VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) | + VkFlags( AccessFlagBits::eColorAttachmentWrite ) | VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) | + VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | VkFlags( AccessFlagBits::eTransferRead ) | + VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) | + VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) | + VkFlags( AccessFlagBits::eMemoryWrite ) | VkFlags( AccessFlagBits::eNone ) | + VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) | + VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) | + VkFlags( 
AccessFlagBits::eTransformFeedbackCounterWriteEXT ) | + VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) | + VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) | + VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) | + VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) | + VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) | + VkFlags( AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) | + VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | VkFlags( AccessFlagBits::eCommandPreprocessWriteNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, + AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccessFlagBits::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) + result += 
"AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) + result += "FragmentShadingRateAttachmentReadKHR | "; + if ( value & AccessFlagBits::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AttachmentDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( AttachmentDescriptionFlagBits::eMayAlias ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags + operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( AttachmentDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) + result += "MayAlias | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DependencyFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) | + VkFlags( DependencyFlagBits::eViewLocal ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, + DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DependencyFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DependencyFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DependencyFlagBits::eByRegion ) + result += "ByRegion | "; + if ( value & DependencyFlagBits::eDeviceGroup ) + result += "DeviceGroup | "; + if ( value & DependencyFlagBits::eViewLocal ) + result += "ViewLocal | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FramebufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FramebufferCreateFlagBits::eImageless ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
FramebufferCreateFlags + operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags + operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( FramebufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FramebufferCreateFlagBits::eImageless ) + result += "Imageless | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using RenderPassCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags + operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderPassCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( RenderPassCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) + result += "TransformQCOM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SubpassDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) | + VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) | + VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) | + VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM ) | + VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessARM ) | + VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessARM ) | + VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessARM ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags + 
operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SubpassDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) + result += "PerViewAttributesNVX | "; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) + result += "PerViewPositionXOnlyNVX | "; + if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) + result += "FragmentRegionQCOM | "; + if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) + result += "ShaderResolveQCOM | "; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessARM ) + result += "RasterizationOrderAttachmentColorAccessARM | "; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessARM ) + result += "RasterizationOrderAttachmentDepthAccessARM | "; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessARM ) + result += "RasterizationOrderAttachmentStencilAccessARM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) | + VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) | + VkFlags( CommandPoolCreateFlagBits::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags + operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandPoolCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandPoolCreateFlagBits::eTransient ) + result += "Transient | "; + if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) + result += "ResetCommandBuffer | "; + if ( value & CommandPoolCreateFlagBits::eProtected ) + result += "Protected | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator&( CommandPoolResetFlagBits bit0, 
CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolResetFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags + operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandPoolResetFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandPoolResetFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandPoolResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandBufferResetFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags + operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferResetFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandBufferResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandBufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) | + VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) | + VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags + operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) + result += "OneTimeSubmit | "; + if ( 
value & CommandBufferUsageFlagBits::eRenderPassContinue ) + result += "RenderPassContinue | "; + if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) + result += "SimultaneousUse | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using QueryControlFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( QueryControlFlagBits::ePrecise ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, + QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryControlFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryControlFlagBits::ePrecise ) + result += "Precise | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using StencilFaceFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) | + VkFlags( StencilFaceFlagBits::eFrontAndBack ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, + StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( StencilFaceFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & StencilFaceFlagBits::eFront ) + result += "Front | "; + if ( value & StencilFaceFlagBits::eBack ) + result += "Back | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_1 === + + using SubgroupFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) | + VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | VkFlags( SubgroupFeatureFlagBits::eBallot ) | + VkFlags( SubgroupFeatureFlagBits::eShuffle ) | VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) | + VkFlags( SubgroupFeatureFlagBits::eClustered ) | VkFlags( SubgroupFeatureFlagBits::eQuad ) | + VkFlags( SubgroupFeatureFlagBits::ePartitionedNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + 
return SubgroupFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags + operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SubgroupFeatureFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubgroupFeatureFlagBits::eBasic ) + result += "Basic | "; + if ( value & SubgroupFeatureFlagBits::eVote ) + result += "Vote | "; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) + result += "Arithmetic | "; + if ( value & SubgroupFeatureFlagBits::eBallot ) + result += "Ballot | "; + if ( value & SubgroupFeatureFlagBits::eShuffle ) + result += "Shuffle | "; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) + result += "ShuffleRelative | "; + if ( value & SubgroupFeatureFlagBits::eClustered ) + result += "Clustered | "; + if ( value & SubgroupFeatureFlagBits::eQuad ) + result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) + result += "PartitionedNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PeerMemoryFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) | + VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags + operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PeerMemoryFeatureFlags( bits ) ); + } + + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) + result += "CopySrc | "; + if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) + result += "CopyDst | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) + result += "GenericSrc | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) + result += "GenericDst | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using MemoryAllocateFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) | + 
VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags + operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( MemoryAllocateFlags( bits ) ); + } + + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryAllocateFlagBits::eDeviceMask ) + result += "DeviceMask | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddress ) + result += "DeviceAddress | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using CommandPoolTrimFlags = Flags; + + using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) + { + return "{}"; + } + + using DescriptorUpdateTemplateCreateFlags = Flags; + + using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) + { + return "{}"; + } + + using ExternalMemoryHandleTypeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) | + VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | VkFlags( ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + | VkFlags( ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
ExternalMemoryHandleTypeFlags + operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags + operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryHandleTypeFlags( bits ) ); + } + + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) + result += "D3D11Texture | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) + result += "D3D11TextureKmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) + result += "D3D12Heap | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) + result += "D3D12Resource | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) + result += "DmaBufEXT | "; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) + result += "AndroidHardwareBufferANDROID | "; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) + result += "HostAllocationEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) + result += "HostMappedForeignMemoryEXT | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) + result += "ZirconVmoFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) + result += "RdmaAddressNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalMemoryFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBits::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags + operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryFeatureFlags( bits ) ); + } + + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) + { + if ( !value ) 
+ return "{}"; + + std::string result; + if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBits::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalFenceHandleTypeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceHandleTypeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags + operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalFenceHandleTypeFlags( bits ) ); + } + + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalFenceFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags + operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalFenceFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalFenceFeatureFlags( bits ) ); + } + + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + VULKAN_HPP_INLINE 
std::string to_string( ExternalFenceFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalFenceFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalFenceFeatureFlagBits::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FenceImportFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( FenceImportFlagBits::eTemporary ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceImportFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, + FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FenceImportFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FenceImportFlags( bits ) ); + } + + using FenceImportFlagsKHR = FenceImportFlags; + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FenceImportFlagBits::eTemporary ) + result += "Temporary | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SemaphoreImportFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags + operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags + operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags + operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SemaphoreImportFlags( bits ) ); + } + + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SemaphoreImportFlagBits::eTemporary ) + result += "Temporary | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalSemaphoreHandleTypeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) | + VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + }; + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags + operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); + } + + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) + result += "D3D12Fence | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) + result += "ZirconEventFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalSemaphoreFeatureFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) | + VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags + operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalSemaphoreFeatureFlags( bits ) ); + } + + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) + result += "Importable | "; + + return "{ " + 
result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_2 === + + using DescriptorBindingFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) | + VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) | + VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) | + VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorBindingFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorBindingFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags + operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return DescriptorBindingFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DescriptorBindingFlags( bits ) ); + } + + using DescriptorBindingFlagsEXT = DescriptorBindingFlags; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) + result += "UpdateUnusedWhilePending | "; + if ( value & DescriptorBindingFlagBits::ePartiallyBound ) + result += "PartiallyBound | "; + if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) + result += "VariableDescriptorCount | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ResolveModeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) | + VkFlags( ResolveModeFlagBits::eAverage ) | VkFlags( ResolveModeFlagBits::eMin ) | + VkFlags( ResolveModeFlagBits::eMax ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, + ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ResolveModeFlags( bits ) ); + } + + using ResolveModeFlagsKHR = ResolveModeFlags; + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ResolveModeFlagBits::eSampleZero ) + result += "SampleZero | "; + if ( value & ResolveModeFlagBits::eAverage ) + result += "Average | "; + if ( value & ResolveModeFlagBits::eMin ) + result += "Min | "; + if ( value & ResolveModeFlagBits::eMax ) + result += "Max | "; + + return "{ " + result.substr( 
0, result.size() - 3 ) + " }"; + } + + using SemaphoreWaitFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SemaphoreWaitFlagBits::eAny ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreWaitFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreWaitFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, + SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreWaitFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SemaphoreWaitFlags( bits ) ); + } + + using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SemaphoreWaitFlagBits::eAny ) + result += "Any | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_3 === + + using PipelineCreationFeedbackFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PipelineCreationFeedbackFlagBits::eValid ) | + VkFlags( PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) | + VkFlags( PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags + operator|( PipelineCreationFeedbackFlagBits bit0, PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags + operator&( PipelineCreationFeedbackFlagBits bit0, PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags + operator^( PipelineCreationFeedbackFlagBits bit0, PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags + operator~( PipelineCreationFeedbackFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreationFeedbackFlags( bits ) ); + } + + using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreationFeedbackFlagBits::eValid ) + result += "Valid | "; + if ( value & PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) + result += "ApplicationPipelineCacheHit | "; + if ( value & PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration ) + result += "BasePipelineAcceleration | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ToolPurposeFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ToolPurposeFlagBits::eValidation ) | VkFlags( ToolPurposeFlagBits::eProfiling ) | + VkFlags( ToolPurposeFlagBits::eTracing ) | VkFlags( ToolPurposeFlagBits::eAdditionalFeatures ) | + VkFlags( 
ToolPurposeFlagBits::eModifyingFeatures ) | + VkFlags( ToolPurposeFlagBits::eDebugReportingEXT ) | VkFlags( ToolPurposeFlagBits::eDebugMarkersEXT ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator|( ToolPurposeFlagBits bit0, + ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator&( ToolPurposeFlagBits bit0, + ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator^( ToolPurposeFlagBits bit0, + ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ToolPurposeFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator~( ToolPurposeFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ToolPurposeFlags( bits ) ); + } + + using ToolPurposeFlagsEXT = ToolPurposeFlags; + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ToolPurposeFlagBits::eValidation ) + result += "Validation | "; + if ( value & ToolPurposeFlagBits::eProfiling ) + result += "Profiling | "; + if ( value & ToolPurposeFlagBits::eTracing ) + result += "Tracing | "; + if ( value & ToolPurposeFlagBits::eAdditionalFeatures ) + result += "AdditionalFeatures | "; + if ( value & ToolPurposeFlagBits::eModifyingFeatures ) + result += "ModifyingFeatures | "; + if ( value & ToolPurposeFlagBits::eDebugReportingEXT ) + result += "DebugReportingEXT | "; + if ( value & ToolPurposeFlagBits::eDebugMarkersEXT ) + result += "DebugMarkersEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using PrivateDataSlotCreateFlags = Flags; + + using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags; + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlags ) + { + return "{}"; + } + + using PipelineStageFlags2 = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( PipelineStageFlagBits2::eNone ) | VkFlags64( PipelineStageFlagBits2::eTopOfPipe ) | + VkFlags64( PipelineStageFlagBits2::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2::eVertexInput ) | + VkFlags64( PipelineStageFlagBits2::eVertexShader ) | + VkFlags64( PipelineStageFlagBits2::eTessellationControlShader ) | + VkFlags64( PipelineStageFlagBits2::eTessellationEvaluationShader ) | + VkFlags64( PipelineStageFlagBits2::eGeometryShader ) | VkFlags64( PipelineStageFlagBits2::eFragmentShader ) | + VkFlags64( PipelineStageFlagBits2::eEarlyFragmentTests ) | + VkFlags64( PipelineStageFlagBits2::eLateFragmentTests ) | + VkFlags64( PipelineStageFlagBits2::eColorAttachmentOutput ) | + VkFlags64( PipelineStageFlagBits2::eComputeShader ) | VkFlags64( PipelineStageFlagBits2::eAllTransfer ) | + VkFlags64( PipelineStageFlagBits2::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2::eHost ) | + VkFlags64( PipelineStageFlagBits2::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2::eAllCommands ) | + VkFlags64( PipelineStageFlagBits2::eCopy ) | VkFlags64( PipelineStageFlagBits2::eResolve ) | + VkFlags64( PipelineStageFlagBits2::eBlit ) | VkFlags64( PipelineStageFlagBits2::eClear ) | + VkFlags64( PipelineStageFlagBits2::eIndexInput ) | VkFlags64( PipelineStageFlagBits2::eVertexAttributeInput ) | + VkFlags64( PipelineStageFlagBits2::ePreRasterizationShaders ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( 
PipelineStageFlagBits2::eVideoDecodeKHR ) | VkFlags64( PipelineStageFlagBits2::eVideoEncodeKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( PipelineStageFlagBits2::eTransformFeedbackEXT ) | + VkFlags64( PipelineStageFlagBits2::eConditionalRenderingEXT ) | + VkFlags64( PipelineStageFlagBits2::eCommandPreprocessNV ) | + VkFlags64( PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) | + VkFlags64( PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) | + VkFlags64( PipelineStageFlagBits2::eRayTracingShaderKHR ) | + VkFlags64( PipelineStageFlagBits2::eFragmentDensityProcessEXT ) | + VkFlags64( PipelineStageFlagBits2::eTaskShaderNV ) | VkFlags64( PipelineStageFlagBits2::eMeshShaderNV ) | + VkFlags64( PipelineStageFlagBits2::eSubpassShadingHUAWEI ) | + VkFlags64( PipelineStageFlagBits2::eInvocationMaskHUAWEI ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 + operator|( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 + operator&( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 + operator^( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags2( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 operator~( PipelineStageFlagBits2 bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( PipelineStageFlags2( bits ) ); + } + + using PipelineStageFlags2KHR = PipelineStageFlags2; + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineStageFlagBits2::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits2::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits2::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits2::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits2::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits2::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits2::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits2::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits2::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits2::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits2::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits2::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits2::eAllTransfer ) + result += "AllTransfer | "; + if ( value & PipelineStageFlagBits2::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits2::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits2::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits2::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits2::eCopy ) + result += "Copy | "; + if ( value & PipelineStageFlagBits2::eResolve ) + result += "Resolve | "; + if ( value & 
PipelineStageFlagBits2::eBlit ) + result += "Blit | "; + if ( value & PipelineStageFlagBits2::eClear ) + result += "Clear | "; + if ( value & PipelineStageFlagBits2::eIndexInput ) + result += "IndexInput | "; + if ( value & PipelineStageFlagBits2::eVertexAttributeInput ) + result += "VertexAttributeInput | "; + if ( value & PipelineStageFlagBits2::ePreRasterizationShaders ) + result += "PreRasterizationShaders | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineStageFlagBits2::eVideoDecodeKHR ) + result += "VideoDecodeKHR | "; + if ( value & PipelineStageFlagBits2::eVideoEncodeKHR ) + result += "VideoEncodeKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits2::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + if ( value & PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; + if ( value & PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) + result += "AccelerationStructureBuildKHR | "; + if ( value & PipelineStageFlagBits2::eRayTracingShaderKHR ) + result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits2::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits2::eTaskShaderNV ) + result += "TaskShaderNV | "; + if ( value & PipelineStageFlagBits2::eMeshShaderNV ) + result += "MeshShaderNV | "; + if ( value & PipelineStageFlagBits2::eSubpassShadingHUAWEI ) + result += "SubpassShadingHUAWEI | "; + if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI ) + result += "InvocationMaskHUAWEI | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccessFlags2 = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( AccessFlagBits2::eNone ) | VkFlags64( AccessFlagBits2::eIndirectCommandRead ) | + VkFlags64( AccessFlagBits2::eIndexRead ) | VkFlags64( AccessFlagBits2::eVertexAttributeRead ) | + VkFlags64( AccessFlagBits2::eUniformRead ) | VkFlags64( AccessFlagBits2::eInputAttachmentRead ) | + VkFlags64( AccessFlagBits2::eShaderRead ) | VkFlags64( AccessFlagBits2::eShaderWrite ) | + VkFlags64( AccessFlagBits2::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2::eColorAttachmentWrite ) | + VkFlags64( AccessFlagBits2::eDepthStencilAttachmentRead ) | + VkFlags64( AccessFlagBits2::eDepthStencilAttachmentWrite ) | VkFlags64( AccessFlagBits2::eTransferRead ) | + VkFlags64( AccessFlagBits2::eTransferWrite ) | VkFlags64( AccessFlagBits2::eHostRead ) | + VkFlags64( AccessFlagBits2::eHostWrite ) | VkFlags64( AccessFlagBits2::eMemoryRead ) | + VkFlags64( AccessFlagBits2::eMemoryWrite ) | VkFlags64( AccessFlagBits2::eShaderSampledRead ) | + VkFlags64( AccessFlagBits2::eShaderStorageRead ) | VkFlags64( AccessFlagBits2::eShaderStorageWrite ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( AccessFlagBits2::eVideoDecodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoDecodeWriteKHR ) | + VkFlags64( AccessFlagBits2::eVideoEncodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoEncodeWriteKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( AccessFlagBits2::eTransformFeedbackWriteEXT ) | + VkFlags64( AccessFlagBits2::eTransformFeedbackCounterReadEXT ) | + VkFlags64( AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) | + VkFlags64( 
AccessFlagBits2::eConditionalRenderingReadEXT ) | + VkFlags64( AccessFlagBits2::eCommandPreprocessReadNV ) | + VkFlags64( AccessFlagBits2::eCommandPreprocessWriteNV ) | + VkFlags64( AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) | + VkFlags64( AccessFlagBits2::eAccelerationStructureReadKHR ) | + VkFlags64( AccessFlagBits2::eAccelerationStructureWriteKHR ) | + VkFlags64( AccessFlagBits2::eFragmentDensityMapReadEXT ) | + VkFlags64( AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) | + VkFlags64( AccessFlagBits2::eInvocationMaskReadHUAWEI ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator|( AccessFlagBits2 bit0, + AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator&( AccessFlagBits2 bit0, + AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator^( AccessFlagBits2 bit0, + AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags2( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator~( AccessFlagBits2 bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags2( bits ) ); + } + + using AccessFlags2KHR = AccessFlags2; + + VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccessFlagBits2::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits2::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits2::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits2::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits2::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits2::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits2::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits2::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits2::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits2::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits2::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits2::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits2::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits2::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits2::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits2::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits2::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits2::eShaderSampledRead ) + result += "ShaderSampledRead | "; + if ( value & AccessFlagBits2::eShaderStorageRead ) + result += "ShaderStorageRead | "; + if ( value & AccessFlagBits2::eShaderStorageWrite ) + result += "ShaderStorageWrite | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & AccessFlagBits2::eVideoDecodeReadKHR ) + result += "VideoDecodeReadKHR | "; + if ( value & AccessFlagBits2::eVideoDecodeWriteKHR ) + result += "VideoDecodeWriteKHR | "; + if ( value & AccessFlagBits2::eVideoEncodeReadKHR ) + result += "VideoEncodeReadKHR | "; + if ( value & AccessFlagBits2::eVideoEncodeWriteKHR ) + result += "VideoEncodeWriteKHR | "; 
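+    // Each flag bit that is set appends its name plus a trailing " | ";
+    // the last separator is trimmed below via substr( 0, result.size() - 3 ).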
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits2::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits2::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits2::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + if ( value & AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) + result += "FragmentShadingRateAttachmentReadKHR | "; + if ( value & AccessFlagBits2::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits2::eAccelerationStructureWriteKHR ) + result += "AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits2::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI ) + result += "InvocationMaskReadHUAWEI | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using SubmitFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SubmitFlagBits::eProtected ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator|( SubmitFlagBits bit0, + SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator&( SubmitFlagBits bit0, + SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator^( SubmitFlagBits bit0, + SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubmitFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator~( SubmitFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SubmitFlags( bits ) ); + } + + using SubmitFlagsKHR = SubmitFlags; + + VULKAN_HPP_INLINE std::string to_string( SubmitFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubmitFlagBits::eProtected ) + result += "Protected | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using RenderingFlags = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( RenderingFlagBits::eContentsSecondaryCommandBuffers ) | + VkFlags( RenderingFlagBits::eSuspending ) | VkFlags( RenderingFlagBits::eResuming ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator|( RenderingFlagBits bit0, + RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderingFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator&( RenderingFlagBits bit0, + RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderingFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator^( RenderingFlagBits bit0, + RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return RenderingFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator~( RenderingFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( RenderingFlags( 
bits ) ); + } + + using RenderingFlagsKHR = RenderingFlags; + + VULKAN_HPP_INLINE std::string to_string( RenderingFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & RenderingFlagBits::eContentsSecondaryCommandBuffers ) + result += "ContentsSecondaryCommandBuffers | "; + if ( value & RenderingFlagBits::eSuspending ) + result += "Suspending | "; + if ( value & RenderingFlagBits::eResuming ) + result += "Resuming | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using FormatFeatureFlags2 = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags64 + { + allFlags = + VkFlags64( FormatFeatureFlagBits2::eSampledImage ) | VkFlags64( FormatFeatureFlagBits2::eStorageImage ) | + VkFlags64( FormatFeatureFlagBits2::eStorageImageAtomic ) | + VkFlags64( FormatFeatureFlagBits2::eUniformTexelBuffer ) | + VkFlags64( FormatFeatureFlagBits2::eStorageTexelBuffer ) | + VkFlags64( FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) | + VkFlags64( FormatFeatureFlagBits2::eVertexBuffer ) | VkFlags64( FormatFeatureFlagBits2::eColorAttachment ) | + VkFlags64( FormatFeatureFlagBits2::eColorAttachmentBlend ) | + VkFlags64( FormatFeatureFlagBits2::eDepthStencilAttachment ) | VkFlags64( FormatFeatureFlagBits2::eBlitSrc ) | + VkFlags64( FormatFeatureFlagBits2::eBlitDst ) | VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterLinear ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterCubic ) | + VkFlags64( FormatFeatureFlagBits2::eTransferSrc ) | VkFlags64( FormatFeatureFlagBits2::eTransferDst ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterMinmax ) | + VkFlags64( FormatFeatureFlagBits2::eMidpointChromaSamples ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | + VkFlags64( FormatFeatureFlagBits2::eDisjoint ) | VkFlags64( FormatFeatureFlagBits2::eCositedChromaSamples ) | + VkFlags64( FormatFeatureFlagBits2::eStorageReadWithoutFormat ) | + VkFlags64( FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) | + VkFlags64( FormatFeatureFlagBits2::eSampledImageDepthComparison ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) | + VkFlags64( FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) | + VkFlags64( FormatFeatureFlagBits2::eFragmentDensityMapEXT ) | + VkFlags64( FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags64( FormatFeatureFlagBits2::eVideoEncodeInputKHR ) | + VkFlags64( FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | VkFlags64( FormatFeatureFlagBits2::eLinearColorAttachmentNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 + operator|( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags2( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 + operator&( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags2( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 + operator^( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT + { + return FormatFeatureFlags2( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 operator~( FormatFeatureFlagBits2 bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( FormatFeatureFlags2( bits ) ); + } + + using FormatFeatureFlags2KHR = FormatFeatureFlags2; + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FormatFeatureFlagBits2::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits2::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits2::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits2::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits2::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits2::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits2::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits2::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits2::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits2::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits2::eBlitDst ) + result += "BlitDst | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) + result += "SampledImageFilterCubic | "; + if ( value & FormatFeatureFlagBits2::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits2::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits2::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits2::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits2::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits2::eStorageReadWithoutFormat ) + result += "StorageReadWithoutFormat | "; + if ( value & FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) + result += "StorageWriteWithoutFormat | "; + if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison ) + result += "SampledImageDepthComparison | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & 
FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) + result += "VideoDecodeOutputKHR | "; + if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) + result += "VideoEncodeInputKHR | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV ) + result += "LinearColorAttachmentNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_surface === + + using CompositeAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) | + VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | VkFlags( CompositeAlphaFlagBitsKHR::eInherit ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return CompositeAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return CompositeAlphaFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR + operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return CompositeAlphaFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( CompositeAlphaFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) + result += "PreMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) + result += "PostMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::eInherit ) + result += "Inherit | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_swapchain === + + using SwapchainCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) | + VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) | + VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SwapchainCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SwapchainCreateFlagsKHR( 
bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR + operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SwapchainCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SwapchainCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & SwapchainCreateFlagBitsKHR::eProtected ) + result += "Protected | "; + if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) + result += "MutableFormat | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DeviceGroupPresentModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) | + VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR + operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) + result += "Local | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) + result += "Remote | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) + result += "Sum | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + result += "LocalMultiDevice | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_display === + + using DisplayModeCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) + { + return "{}"; + } + + using DisplayPlaneAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) | + VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return 
DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR + operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) + result += "Global | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) + result += "PerPixel | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + result += "PerPixelPremultiplied | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DisplaySurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) + { + return "{}"; + } + + using SurfaceTransformFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) | + VkFlags( SurfaceTransformFlagBitsKHR::eInherit ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR + operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceTransformFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SurfaceTransformFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) + result += "Identity | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) + result += "Rotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) + result += "Rotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) + result += "Rotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) + result += "HorizontalMirror | "; + if ( value & 
SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) + result += "HorizontalMirrorRotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) + result += "HorizontalMirrorRotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) + result += "HorizontalMirrorRotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eInherit ) + result += "Inherit | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + using XlibSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + using XcbSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + using WaylandSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + using AndroidSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + using Win32SurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + using DebugReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) | + VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) | + VkFlags( DebugReportFlagBitsEXT::eDebug ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT + operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugReportFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( DebugReportFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugReportFlagBitsEXT::eInformation ) + result += "Information | "; + if ( value & DebugReportFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) + result += "PerformanceWarning | "; + if ( value & DebugReportFlagBitsEXT::eError ) + result += "Error | "; + if ( value & DebugReportFlagBitsEXT::eDebug ) + result += "Debug | "; + + return "{ " + result.substr( 0, 
result.size() - 3 ) + " }"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + using VideoCodecOperationFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eInvalid ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) | + VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator|( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator&( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR + operator^( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodecOperationFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator~( VideoCodecOperationFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodecOperationFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) + result += "EncodeH264EXT | "; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) + result += "EncodeH265EXT | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) + result += "DecodeH264EXT | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) + result += "DecodeH265EXT | "; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoChromaSubsamplingFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) | + VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator|( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator&( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator^( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoChromaSubsamplingFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR + operator~( VideoChromaSubsamplingFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( 
VideoChromaSubsamplingFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) + result += "Monochrome | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 ) + result += "420 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 ) + result += "422 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 ) + result += "444 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoComponentBitDepthFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) | + VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) | + VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator|( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator&( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator^( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoComponentBitDepthFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR + operator~( VideoComponentBitDepthFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoComponentBitDepthFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) + result += "8 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e10 ) + result += "10 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e12 ) + result += "12 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoCapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCapabilityFlagBitsKHR::eProtectedContent ) | + VkFlags( VideoCapabilityFlagBitsKHR::eSeparateReferenceImages ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR + operator|( VideoCapabilityFlagBitsKHR bit0, VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilityFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR + operator&( VideoCapabilityFlagBitsKHR bit0, VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilityFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR + operator^( VideoCapabilityFlagBitsKHR bit0, VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCapabilityFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR operator~( VideoCapabilityFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCapabilityFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagsKHR value ) + { + if ( !value ) + 
return "{}"; + + std::string result; + if ( value & VideoCapabilityFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + if ( value & VideoCapabilityFlagBitsKHR::eSeparateReferenceImages ) + result += "SeparateReferenceImages | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoSessionCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoSessionCreateFlagBitsKHR::eDefault ) | VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator|( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator&( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR + operator^( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoSessionCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator~( VideoSessionCreateFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoSessionCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoBeginCodingFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR ) + { + return "{}"; + } + + using VideoEndCodingFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR ) + { + return "{}"; + } + + using VideoCodingControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eDefault ) | VkFlags( VideoCodingControlFlagBitsKHR::eReset ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator|( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator&( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR + operator^( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingControlFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator~( VideoCodingControlFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodingControlFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCodingControlFlagBitsKHR::eReset ) + result += "Reset | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoCodingQualityPresetFlagsKHR = Flags; + + template <> + struct 
FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoCodingQualityPresetFlagBitsKHR::eNormal ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::ePower ) | + VkFlags( VideoCodingQualityPresetFlagBitsKHR::eQuality ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator|( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator&( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator^( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoCodingQualityPresetFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR + operator~( VideoCodingQualityPresetFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoCodingQualityPresetFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCodingQualityPresetFlagBitsKHR::eNormal ) + result += "Normal | "; + if ( value & VideoCodingQualityPresetFlagBitsKHR::ePower ) + result += "Power | "; + if ( value & VideoCodingQualityPresetFlagBitsKHR::eQuality ) + result += "Quality | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + using VideoDecodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoDecodeFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator|( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator&( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR + operator^( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator~( VideoDecodeFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoDecodeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoDecodeFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + + using VideoEncodeH264CapabilityFlagsEXT = Flags; + + template <> + struct FlagTraits + 
{ + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBiPredImplicit ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) | + VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eEvenlyDistributedSliceSize ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator|( + VideoEncodeH264CapabilityFlagBitsEXT bit0, VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilityFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator&( + VideoEncodeH264CapabilityFlagBitsEXT bit0, VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilityFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator^( + VideoEncodeH264CapabilityFlagBitsEXT bit0, VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CapabilityFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT + operator~( VideoEncodeH264CapabilityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264CapabilityFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) + result += "Cabac | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) + result += "Cavlc | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBiPredImplicit ) + result += "WeightedBiPredImplicit | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) + result += "Transform8X8 | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) + result += "ChromaQpOffset | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) + result += "SecondChromaQpOffset | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) + result += "DeblockingFilterDisabled | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) + result += "DeblockingFilterEnabled | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) + result += "DeblockingFilterPartial | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) + result += "MultipleSlicePerFrame | "; + if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eEvenlyDistributedSliceSize ) + result += "EvenlyDistributedSliceSize | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264InputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) | + VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) | + VkFlags( 
VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator|( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator&( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator^( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264InputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT + operator~( VideoEncodeH264InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264InputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame ) + result += "Frame | "; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice ) + result += "Slice | "; + if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) + result += "NonVcl | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264OutputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) | + VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) | + VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator|( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator&( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator^( + VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264OutputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT + operator~( VideoEncodeH264OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264OutputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) + result += "Frame | "; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) + result += "Slice | "; + if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) + result += "NonVcl | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264CreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = + VkFlags( VideoEncodeH264CreateFlagBitsEXT::eDefault ) | VkFlags( 
VideoEncodeH264CreateFlagBitsEXT::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator|( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator&( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator^( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264CreateFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT + operator~( VideoEncodeH264CreateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264CreateFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264CreateFlagBitsEXT::eReserved0 ) + result += "Reserved0 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH264RateControlStructureFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown ) | + VkFlags( VideoEncodeH264RateControlStructureFlagBitsEXT::eFlat ) | + VkFlags( VideoEncodeH264RateControlStructureFlagBitsEXT::eDyadic ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT + operator|( VideoEncodeH264RateControlStructureFlagBitsEXT bit0, + VideoEncodeH264RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264RateControlStructureFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT + operator&( VideoEncodeH264RateControlStructureFlagBitsEXT bit0, + VideoEncodeH264RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264RateControlStructureFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT + operator^( VideoEncodeH264RateControlStructureFlagBitsEXT bit0, + VideoEncodeH264RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH264RateControlStructureFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlStructureFlagsEXT + operator~( VideoEncodeH264RateControlStructureFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH264RateControlStructureFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264RateControlStructureFlagBitsEXT::eFlat ) + result += "Flat | "; + if ( value & VideoEncodeH264RateControlStructureFlagBitsEXT::eDyadic ) + result += "Dyadic | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h265 === + + using VideoEncodeH265CapabilityFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsEXT ) + { + return "{}"; + } + + using 
VideoEncodeH265InputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eFrame ) | + VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eSlice ) | + VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT + operator|( VideoEncodeH265InputModeFlagBitsEXT bit0, VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265InputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT + operator&( VideoEncodeH265InputModeFlagBitsEXT bit0, VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265InputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT + operator^( VideoEncodeH265InputModeFlagBitsEXT bit0, VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265InputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT + operator~( VideoEncodeH265InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH265InputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265InputModeFlagBitsEXT::eFrame ) + result += "Frame | "; + if ( value & VideoEncodeH265InputModeFlagBitsEXT::eSlice ) + result += "Slice | "; + if ( value & VideoEncodeH265InputModeFlagBitsEXT::eNonVcl ) + result += "NonVcl | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH265OutputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) | + VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eSlice ) | + VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator|( + VideoEncodeH265OutputModeFlagBitsEXT bit0, VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265OutputModeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator&( + VideoEncodeH265OutputModeFlagBitsEXT bit0, VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265OutputModeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator^( + VideoEncodeH265OutputModeFlagBitsEXT bit0, VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265OutputModeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT + operator~( VideoEncodeH265OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH265OutputModeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) + result += "Frame | "; + if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eSlice ) + result += "Slice | "; + if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl ) + result += "NonVcl | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + 
" }"; + } + + using VideoEncodeH265CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CreateFlagsEXT ) + { + return "{}"; + } + + using VideoEncodeH265CtbSizeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e8 ) | VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) | + VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) | VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e64 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT + operator|( VideoEncodeH265CtbSizeFlagBitsEXT bit0, VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT + operator&( VideoEncodeH265CtbSizeFlagBitsEXT bit0, VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT + operator^( VideoEncodeH265CtbSizeFlagBitsEXT bit0, VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT + operator~( VideoEncodeH265CtbSizeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH265CtbSizeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e8 ) + result += "8 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) + result += "16 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) + result += "32 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e64 ) + result += "64 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeH265RateControlStructureFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown ) | + VkFlags( VideoEncodeH265RateControlStructureFlagBitsEXT::eFlat ) | + VkFlags( VideoEncodeH265RateControlStructureFlagBitsEXT::eDyadic ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT + operator|( VideoEncodeH265RateControlStructureFlagBitsEXT bit0, + VideoEncodeH265RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265RateControlStructureFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT + operator&( VideoEncodeH265RateControlStructureFlagBitsEXT bit0, + VideoEncodeH265RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265RateControlStructureFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT + operator^( VideoEncodeH265RateControlStructureFlagBitsEXT bit0, + VideoEncodeH265RateControlStructureFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeH265RateControlStructureFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlStructureFlagsEXT + operator~( VideoEncodeH265RateControlStructureFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeH265RateControlStructureFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( 
VideoEncodeH265RateControlStructureFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265RateControlStructureFlagBitsEXT::eFlat ) + result += "Flat | "; + if ( value & VideoEncodeH265RateControlStructureFlagBitsEXT::eDyadic ) + result += "Dyadic | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h264 === + + using VideoDecodeH264PictureLayoutFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive ) | + VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines ) | + VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator|( + VideoDecodeH264PictureLayoutFlagBitsEXT bit0, VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator&( + VideoDecodeH264PictureLayoutFlagBitsEXT bit0, VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator^( + VideoDecodeH264PictureLayoutFlagBitsEXT bit0, VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT + operator~( VideoDecodeH264PictureLayoutFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoDecodeH264PictureLayoutFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines ) + result += "InterlacedInterleavedLines | "; + if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes ) + result += "InterlacedSeparatePlanes | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoDecodeH264CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + using StreamDescriptorSurfaceCreateFlagsGGP = Flags; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + using ExternalMemoryHandleTypeFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) | + VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; + } 
+ + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV + operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) + result += "D3D11Image | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + result += "D3D11ImageKmt | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using ExternalMemoryFeatureFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) | + VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV + operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ExternalMemoryFeatureFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + using ViSurfaceCreateFlagsNN = Flags; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + using ConditionalRenderingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted ) + }; + }; + + VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT + operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( ConditionalRenderingFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) + result += "Inverted | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_display_surface_counter === + + using SurfaceCounterFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT + operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( SurfaceCounterFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) + result += "Vblank | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_viewport_swizzle === + + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_discard_rectangles === + + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_conservative_rasterization === + + using PipelineRasterizationConservativeStateCreateFlagsEXT = + Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_depth_clip_enable === + + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_performance_query === + + using PerformanceCounterDescriptionFlagsKHR = Flags; + + 
template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) | + VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( + PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR + operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PerformanceCounterDescriptionFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) + result += "PerformanceImpacting | "; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + result += "ConcurrentlyImpacted | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AcquireProfilingLockFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + using IOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + using MacOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) | + VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( + DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { 
+ return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT + operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) + result += "Verbose | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) + result += "Info | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) + result += "Error | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) | + VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT + operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) + result += "General | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + result += "Performance | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) + { + return "{}"; + } + + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_fragment_coverage_to_color === + + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_KHR_acceleration_structure === + + using GeometryFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + 
return GeometryFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, + GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( GeometryFlagsKHR( bits ) ); + } + + using GeometryFlagsNV = GeometryFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & GeometryFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + result += "NoDuplicateAnyHitInvocation | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using GeometryInstanceFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | + VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | + VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR + operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( GeometryInstanceFlagsKHR( bits ) ); + } + + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) + result += "TriangleFacingCullDisable | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) + result += "TriangleFlipFacing | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) + result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + result += "ForceNoOpaque | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using BuildAccelerationStructureFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) | + VkFlags( BuildAccelerationStructureFlagBitsKHR::eMotionNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
BuildAccelerationStructureFlagsKHR operator|( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( + BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR + operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BuildAccelerationStructureFlagsKHR( bits ) ); + } + + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) + result += "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) + result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) + result += "PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + result += "LowMemory | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV ) + result += "MotionNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using AccelerationStructureCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) | + VkFlags( AccelerationStructureCreateFlagBitsKHR::eMotionNV ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator|( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^( + AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR + operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccelerationStructureCreateFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV 
) + result += "MotionNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_framebuffer_mixed_samples === + + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_validation_cache === + + using ValidationCacheCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_AMD_pipeline_compiler_control === + + using PipelineCompilerControlFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h265 === + + using VideoDecodeH265CreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + using MetalSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_AMD_shader_core_properties2 === + + using ShaderCorePropertiesFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + { + return "{}"; + } + + //=== VK_NV_coverage_reduction_mode === + + using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_headless_surface === + + using HeadlessSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_device_generated_commands === + + using IndirectStateFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV + operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectStateFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( IndirectStateFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) + result += "FlagFrontface | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { 
+ allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) | + VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( + IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV + operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) + result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + result += "UnorderedSequences | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_device_memory_report === + + using DeviceMemoryReportFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT ) + { + return "{}"; + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + using VideoEncodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator|( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator&( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR + operator^( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator~( VideoEncodeFlagBitsKHR bits ) + VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeRateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeRateControlFlagBitsKHR::eDefault 
) | + VkFlags( VideoEncodeRateControlFlagBitsKHR::eReserved0 ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator|( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator&( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator^( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR + operator~( VideoEncodeRateControlFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeRateControlFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeRateControlFlagBitsKHR::eReserved0 ) + result += "Reserved0 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + using VideoEncodeRateControlModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) | + VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) | + VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator|( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator&( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator^( + VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return VideoEncodeRateControlModeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR + operator~( VideoEncodeRateControlModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( VideoEncodeRateControlModeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) | + VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return 
DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV + operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + return ~( DeviceDiagnosticsConfigFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) + result += "EnableShaderDebugInfo | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) + result += "EnableResourceTracking | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + result += "EnableAutomaticCheckpoints | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_ray_tracing_motion_blur === + + using AccelerationStructureMotionInfoFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagsNV ) + { + return "{}"; + } + + using AccelerationStructureMotionInstanceFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagsNV ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + using DirectFBSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + using ImageFormatConstraintsFlagsFUCHSIA = Flags; + + VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagsFUCHSIA ) + { + return "{}"; + } + + using ImageConstraintsInfoFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) | + VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) | + VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) | + VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) | + VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional ) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA + operator|( ImageConstraintsInfoFlagBitsFUCHSIA bit0, ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA + operator&( ImageConstraintsInfoFlagBitsFUCHSIA bit0, ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA + operator^( ImageConstraintsInfoFlagBitsFUCHSIA bit0, ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
ImageConstraintsInfoFlagsFUCHSIA + operator~( ImageConstraintsInfoFlagBitsFUCHSIA bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageConstraintsInfoFlagsFUCHSIA( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagsFUCHSIA value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) + result += "CpuReadRarely | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) + result += "CpuReadOften | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) + result += "CpuWriteRarely | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) + result += "CpuWriteOften | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional ) + result += "ProtectedOptional | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + using ScreenSurfaceCreateFlagsQNX = Flags; + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_fuchsia.h b/Externals/Vulkan/Include/vulkan/vulkan_fuchsia.h index 81ebe55d313e..61774ff9cb71 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_fuchsia.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_fuchsia.h @@ -2,19 +2,9 @@ #define VULKAN_FUCHSIA_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
+** SPDX-License-Identifier: Apache-2.0 */ /* @@ -50,6 +40,217 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( VkSurfaceKHR* pSurface); #endif + +#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); +#endif + + +#define VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t zirconHandle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); +#endif + + +#define VK_FUCHSIA_buffer_collection 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) +#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2 +#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection" +typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA; + 
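As a point of reference for the VK_FUCHSIA_external_memory entry points declared above, an application would typically ask which memory types can import an existing zircon VMO before allocating. The sketch below is illustrative only and is not part of the vendored header: it assumes a Fuchsia build with VK_USE_PLATFORM_FUCHSIA defined, prototypes available (VK_NO_PROTOTYPES not set), and a pre-existing VkDevice and zx_handle_t; the function name and the FUCHSIA enum constants used are assumptions based on the usual Vulkan naming pattern, not quoted from this patch.

// Sketch only: query importable memory types for a zircon VMO handle.
#include <vulkan/vulkan.h>   // with VK_USE_PLATFORM_FUCHSIA defined by the build

static uint32_t QueryImportableMemoryTypes(VkDevice device, zx_handle_t vmo)
{
  VkMemoryZirconHandlePropertiesFUCHSIA props = {};
  props.sType = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA;
  const VkResult res = vkGetMemoryZirconHandlePropertiesFUCHSIA(
      device, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, vmo, &props);
  // memoryTypeBits is a bitmask of memory type indices that can import this handle.
  return (res == VK_SUCCESS) ? props.memoryTypeBits : 0u;
}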
+typedef enum VkImageConstraintsInfoFlagBitsFUCHSIA { + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA = 0x00000001, + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA = 0x00000002, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA = 0x00000004, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA = 0x00000008, + VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA = 0x00000010, + VK_IMAGE_CONSTRAINTS_INFO_FLAG_BITS_MAX_ENUM_FUCHSIA = 0x7FFFFFFF +} VkImageConstraintsInfoFlagBitsFUCHSIA; +typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA; +typedef struct VkBufferCollectionCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + zx_handle_t collectionToken; +} VkBufferCollectionCreateInfoFUCHSIA; + +typedef struct VkImportMemoryBufferCollectionFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkImportMemoryBufferCollectionFUCHSIA; + +typedef struct VkBufferCollectionImageCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionImageCreateInfoFUCHSIA; + +typedef struct VkBufferCollectionConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t minBufferCount; + uint32_t maxBufferCount; + uint32_t minBufferCountForCamping; + uint32_t minBufferCountForDedicatedSlack; + uint32_t minBufferCountForSharedSlack; +} VkBufferCollectionConstraintsInfoFUCHSIA; + +typedef struct VkBufferConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCreateInfo createInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; +} VkBufferConstraintsInfoFUCHSIA; + +typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionBufferCreateInfoFUCHSIA; + +typedef struct VkSysmemColorSpaceFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t colorSpace; +} VkSysmemColorSpaceFUCHSIA; + +typedef struct VkBufferCollectionPropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; + uint32_t bufferCount; + uint32_t createInfoIndex; + uint64_t sysmemPixelFormat; + VkFormatFeatureFlags formatFeatures; + VkSysmemColorSpaceFUCHSIA sysmemColorSpaceIndex; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkBufferCollectionPropertiesFUCHSIA; + +typedef struct VkImageFormatConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImageCreateInfo imageCreateInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkImageFormatConstraintsFlagsFUCHSIA flags; + uint64_t sysmemPixelFormat; + uint32_t colorSpaceCount; + const VkSysmemColorSpaceFUCHSIA* pColorSpaces; +} VkImageFormatConstraintsInfoFUCHSIA; + +typedef struct VkImageConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t formatConstraintsCount; + const VkImageFormatConstraintsInfoFUCHSIA* pFormatConstraints; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; + VkImageConstraintsInfoFlagsFUCHSIA flags; +} VkImageConstraintsInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const 
VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA( + VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferCollectionFUCHSIA* pCollection); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionImageConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA* pProperties); +#endif + #ifdef __cplusplus } #endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_funcs.hpp b/Externals/Vulkan/Include/vulkan/vulkan_funcs.hpp new file mode 100644 index 000000000000..8d556d68b758 --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_funcs.hpp @@ -0,0 +1,20957 @@ +// Copyright 2015-2022 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
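The roughly 21,000 lines that follow define the enhanced-mode C++ wrappers around the C entry points. As a rough illustration of what they provide to a caller (a sketch only; the application name, appInfo, and createInfo are placeholder names, not taken from this patch), typical usage with the default dispatcher and exceptions enabled looks like this:

// Sketch of enhanced-mode usage; vk::createInstance throws vk::SystemError on failure
// when exceptions are enabled, so no manual VkResult checking is shown here.
#include <vulkan/vulkan.hpp>

int main()
{
  vk::ApplicationInfo appInfo("example-app", 1, "no-engine", 1, VK_API_VERSION_1_1);
  vk::InstanceCreateInfo createInfo({}, &appInfo);

  vk::Instance instance = vk::createInstance(createInfo);
  // The wrapper hides the count/fill dance and returns a std::vector directly.
  for (vk::PhysicalDevice gpu : instance.enumeratePhysicalDevices())
  {
    vk::PhysicalDeviceProperties props = gpu.getProperties();
    (void)props.deviceName;  // e.g. log it or compare against a preferred adapter
  }
  instance.destroy();
  return 0;
}

The wrappers stay thin: as the definitions below show, each one asserts the VK_HEADER_VERSION of the dispatcher and then forwards to the matching vkXxx function pointer.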
+ +#ifndef VULKAN_FUNCS_HPP +#define VULKAN_FUNCS_HPP + +namespace VULKAN_HPP_NAMESPACE +{ + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pInstance ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Instance instance; + Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Instance instance; + Result result = static_cast( + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + ObjectDestroy deleter( allocator, d ); + return createResultValue( + result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyInstance( m_instance, + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDevices( + m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDevices( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector physicalDevices; + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( + m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + if ( physicalDeviceCount < physicalDevices.size() ) + { + physicalDevices.resize( physicalDeviceCount ); + } + } + return createResultValue( + result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector physicalDevices( physicalDeviceAllocator ); + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( + m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + if ( physicalDeviceCount < physicalDevices.size() ) + { + physicalDevices.resize( physicalDeviceCount ); + } + } + return createResultValue( + result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + 
PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( 
pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector queueFamilyProperties( + queueFamilyPropertiesAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, + reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetInstanceProcAddr( m_instance, pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceProcAddr( m_device, pName ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceProcAddr( m_device, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDevice ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Device device; + Result result = static_cast( + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Device device; + Result result = static_cast( + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + ObjectDestroy deleter( allocator, d ); + return createResultValue( + result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDevice( m_device, + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateInstanceExtensionProperties( + pLayerName, pPropertyCount, 
reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, + layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, + layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + } + + 
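The do/while ( result == Result::eIncomplete ) loops in the wrappers above implement the standard Vulkan two-call enumeration pattern. Written by hand against the C API, the same pattern looks roughly like the sketch below (the helper name is illustrative, not part of the header):

// Hand-rolled equivalent of the enumerateInstanceLayerProperties wrapper above:
// query the count, size the vector, fetch the data, and retry if the set changed.
#include <vulkan/vulkan.h>
#include <vector>

std::vector<VkLayerProperties> EnumerateInstanceLayers()
{
  std::vector<VkLayerProperties> layers;
  VkResult result = VK_SUCCESS;
  do
  {
    uint32_t count = 0;
    result = vkEnumerateInstanceLayerProperties(&count, nullptr);        // first call: count only
    if (result != VK_SUCCESS || count == 0)
      break;
    layers.resize(count);
    result = vkEnumerateInstanceLayerProperties(&count, layers.data());  // second call: fill data
    layers.resize(count);  // the driver may return fewer entries than first advertised
  } while (result == VK_INCOMPLETE);                                     // layer set changed mid-query: try again
  return layers;
}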
template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 
+ { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueSubmit( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkQueueSubmit( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueWaitIdle( m_queue ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::waitIdle( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDeviceWaitIdle( m_device ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::waitIdle( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); 
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAllocateMemory( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMemory ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + Result result = static_cast( + d.vkAllocateMemory( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + ObjectFree deleter( *this, allocator, d ); + return createResultValue( + result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( + m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + 
Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( m_device, + static_cast( memory ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + void * pData; + Result result = static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUnmapMemory( m_device, static_cast( memory ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkFlushMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::flushMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkFlushMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + 
Device::invalidateMappedMemoryRanges( + ArrayProxy const & memoryRanges, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkInvalidateMappedMemoryRanges( + m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( + m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); + return committedMemoryInBytes; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindBufferMemory( m_device, + static_cast( buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindBufferMemory( m_device, + static_cast( buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindImageMemory( m_device, + static_cast( image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = 
static_cast( d.vkBindImageMemory( m_device, + static_cast( image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( + m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( + m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( 
sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirementsAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirementsAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( + m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( + m_device, + static_cast( image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags 
usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + sparseImageFormatPropertiesAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueBindSparse( m_queue, + bindInfoCount, + reinterpret_cast( pBindInfo ), + static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkQueueBindSparse( m_queue, + bindInfo.size(), + reinterpret_cast( bindInfo.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateFence( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkCreateFence( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( + m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( m_device, + static_cast( fence ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( + m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( m_device, + static_cast( fence ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetFences( ArrayProxy const & fences, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWaitForFences( + m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkWaitForFences( m_device, + fences.size(), + reinterpret_cast( fences.data() ), + static_cast( waitAll ), + timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSemaphore( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSemaphore ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + Result result = static_cast( + d.vkCreateSemaphore( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter ); + } 
+# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( + m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( m_device, + static_cast( semaphore ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateEvent( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pEvent ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Event event; + Result result = static_cast( + d.vkCreateEvent( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Event event; + Result result = static_cast( + d.vkCreateEvent( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + ObjectDestroy deleter( *this, 
allocator, d ); + return createResultValue( + result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( + m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( m_device, + static_cast( event ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( + m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( m_device, + static_cast( event ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", + { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateQueryPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pQueryPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + Result result = static_cast( + d.vkCreateQueryPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + Result result = static_cast( + d.vkCreateQueryPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( + m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( m_device, + static_cast( queryPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( + m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( m_device, + static_cast( queryPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + dataSize, + pData, + static_cast( stride ), + static_cast( flags ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy const & data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( result, + data, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + DataType data; + Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + sizeof( DataType ), + reinterpret_cast( &data ), + static_cast( stride ), + static_cast( flags ) ) ); + return createResultValue( result, + data, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateBuffer( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pBuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Buffer 
buffer; + Result result = static_cast( + d.vkCreateBuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Buffer buffer; + Result result = static_cast( + d.vkCreateBuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( + m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( + m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( m_device, + static_cast( buffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateBufferView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator, + 
Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::BufferView view; + Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::BufferView view; + Result result = static_cast( + d.vkCreateBufferView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( m_device, + static_cast( bufferView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImage( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pImage ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Image image; + Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Image image; + Result result = static_cast( + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( + m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout + Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + return layout; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImageView( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageView view; + Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageView view; + Result result = static_cast( + d.vkCreateImageView( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( + m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( m_device, + static_cast( imageView ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaderModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + Result result = static_cast( + d.vkCreateShaderModule( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule 
shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + Result result = static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + Result result = static_cast( + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( m_device, + static_cast( pipelineCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, + static_cast( pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( 
pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, + static_cast( pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMergePipelineCaches( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkMergePipelineCaches( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelines( + 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> 
uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Pipeline pipeline; + Result result = static_cast( + d.vkCreateGraphicsPipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + 
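  // [Editorial aside: an illustrative usage sketch, not part of the generated vulkan.hpp code
  //  being patched above. It shows how the enhanced-mode pipeline-creation wrappers are
  //  typically called from application code; `device`, `cache` and `createInfo` are assumed to
  //  be valid objects owned by the caller, and the default `vk` namespace is assumed.]
  inline vk::Pipeline exampleCreateComputePipeline( vk::Device                            device,
                                                    vk::PipelineCache                     cache,
                                                    vk::ComputePipelineCreateInfo const & createInfo )
  {
    // Pipeline creation is one of the few enhanced-mode calls that returns a ResultValue
    // instead of throwing on every non-eSuccess code, because ePipelineCompileRequiredEXT is
    // listed as a success code alongside eSuccess in the createResultValue calls above.
    vk::ResultValue<vk::Pipeline> rv = device.createComputePipeline( cache, createInfo );
    if ( rv.result != vk::Result::eSuccess )
      return vk::Pipeline{};  // caller decides how to handle ePipelineCompileRequiredEXT
    return rv.value;
  }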
template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Pipeline pipeline; + Result result = static_cast( + d.vkCreateComputePipelines( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipeline( + m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkDestroyPipeline( m_device, + static_cast( pipeline ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + Result result = static_cast( + d.vkCreatePipelineLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( m_device, + static_cast( pipelineLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSampler( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSampler ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Sampler sampler; + Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Sampler sampler; + Result result = static_cast( + d.vkCreateSampler( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( + m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( m_device, + static_cast( sampler ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSetLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + Result result = static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + Result result = static_cast( + d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + 
Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + Result result = static_cast( + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + 
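  // [Editorial aside: an illustrative usage sketch only, not part of the generated header or of
  //  this patch. It shows the enhanced-mode createDescriptorPoolUnique wrapper defined above;
  //  `device` is assumed to be a valid vk::Device, exceptions and smart handles are assumed to
  //  be enabled, and the pool sizes are arbitrary example values.]
  inline vk::UniqueDescriptorPool exampleCreateDescriptorPool( vk::Device device )
  {
    vk::DescriptorPoolSize       poolSize( vk::DescriptorType::eUniformBuffer, 16 );
    vk::DescriptorPoolCreateInfo createInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
                                             16,  // maxSets
                                             1,   // poolSizeCount
                                             &poolSize );
    // With exceptions enabled the wrapper throws vk::SystemError on failure and returns the
    // handle directly; the *Unique variant destroys the pool automatically via ObjectDestroy
    // when the returned handle goes out of scope.
    return device.createDescriptorPoolUnique( createInfo );
  }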
+ template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( m_device, + static_cast( descriptorPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkResetDescriptorPool( + m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & 
allocateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector descriptorSets( + allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector descriptorSets( + allocateInfo.descriptorSetCount, descriptorSetAllocator ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, DescriptorSetAllocator> uniqueDescriptorSets; + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, DescriptorSetAllocator> uniqueDescriptorSets( + descriptorSetAllocator ); + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( + d.vkAllocateDescriptorSets( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ ) + { + uniqueDescriptorSets.push_back( UniqueHandle( descriptorSets[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::allocateDescriptorSetsUnique" ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkFreeDescriptorSets( m_device, + static_cast( descriptorPool ), + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSets( m_device, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ), + descriptorCopyCount, + reinterpret_cast( pDescriptorCopies ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSets( ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSets( m_device, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateFramebuffer( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFramebuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + Result result = static_cast( + d.vkCreateFramebuffer( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( m_device, + static_cast( framebuffer ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRenderPass( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( 
m_device, + static_cast( renderPass ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( m_device, + static_cast( renderPass ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderAreaGranularity( + m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderAreaGranularity( + m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + return granularity; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCommandPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCommandPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + Result result = static_cast( + d.vkCreateCommandPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter ); + } +# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( m_device, + static_cast( commandPool ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkResetCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & 
allocateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector commandBuffers( + allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector commandBuffers( + allocateInfo.commandBufferCount, commandBufferAllocator ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, CommandBufferAllocator> uniqueCommandBuffers; + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, CommandBufferAllocator> uniqueCommandBuffers( + commandBufferAllocator ); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( + d.vkAllocateCommandBuffers( m_device, + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + PoolFree deleter( *this, allocateInfo.commandPool, d ); + for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ ) + { + uniqueCommandBuffers.push_back( UniqueHandle( commandBuffers[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::allocateCommandBuffersUnique" ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( m_device, + static_cast( commandPool ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( + const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::end( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = 
static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( + VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipeline( + m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewport( + m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissor( + m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float 
depthBiasSlopeFactor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType 
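// --- Editor's note: sketch only. The ArrayProxy overloads in this hunk accept a single
// element or a container where the C API wants a count/pointer pair. 'cmd', 'layout',
// 'set' and 'extent' are hypothetical values supplied by the caller.
void dynamicStateSketch( vk::CommandBuffer cmd, vk::PipelineLayout layout,
                         vk::DescriptorSet set, vk::Extent2D extent )
{
  vk::Viewport viewport( 0.0f, 0.0f, float( extent.width ), float( extent.height ), 0.0f, 1.0f );
  vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), extent );

  cmd.setViewport( 0, viewport );  // a single Viewport converts to ArrayProxy<const Viewport>
  cmd.setScissor( 0, scissor );

  // One descriptor set, no dynamic offsets (nullptr converts to an empty ArrayProxy).
  cmd.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, layout, 0, set, nullptr );
}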
indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirect( + m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatch( m_commandBuffer, 
groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ), + static_cast( filter ) ); + } + +#ifndef 
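// --- Editor's note: sketch of an indexed draw through the wrappers above. The ArrayProxy
// overload of bindVertexBuffers checks buffers.size() == offsets.size(), throwing
// vk::LogicError (or asserting when VULKAN_HPP_NO_EXCEPTIONS is defined). The handles and
// the index count are hypothetical.
void drawSketch( vk::CommandBuffer cmd, vk::Pipeline pipeline,
                 vk::Buffer vertexBuffer, vk::Buffer indexBuffer, uint32_t indexCount )
{
  cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
  cmd.bindVertexBuffers( 0, vertexBuffer, { 0 } );  // one binding, offset 0
  cmd.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint32 );
  cmd.drawIndexed( indexCount, 1, 0, 0, 0 );
}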
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdFillBuffer( m_commandBuffer, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pColor ), + rangeCount, + reinterpret_cast( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearColorImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( pDepthStencil ), + rangeCount, + reinterpret_cast( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast( image ), + 
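// --- Editor's note: sketch of a staging upload using the copy wrappers in this hunk.
// 'cmd', 'staging', 'image' and 'extent' are hypothetical; the image is assumed to be in
// eTransferDstOptimal already (see the pipelineBarrier sketch further down).
void uploadSketch( vk::CommandBuffer cmd, vk::Buffer staging, vk::Image image, vk::Extent3D extent )
{
  vk::BufferImageCopy region;
  region.imageSubresource = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  region.imageExtent      = extent;

  // A single BufferImageCopy converts to ArrayProxy<const BufferImageCopy>.
  cmd.copyBufferToImage( staging, image, vk::ImageLayout::eTransferDstOptimal, region );
}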
static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearAttachments( m_commandBuffer, + attachmentCount, + reinterpret_cast( pAttachments ), + rectCount, + reinterpret_cast( pRects ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearAttachments( m_commandBuffer, + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regionCount, + reinterpret_cast( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage( m_commandBuffer, + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t 
memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarrierCount, + reinterpret_cast( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), 
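// --- Editor's note: sketch of an image layout transition through the ArrayProxy overload
// of pipelineBarrier defined above; a single ImageMemoryBarrier converts implicitly and
// nullptr stands in for the empty barrier lists. 'cmd' and 'image' are hypothetical.
void transitionSketch( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier;
  barrier.srcAccessMask       = {};
  barrier.dstAccessMask       = vk::AccessFlagBits::eTransferWrite;
  barrier.oldLayout           = vk::ImageLayout::eUndefined;
  barrier.newLayout           = vk::ImageLayout::eTransferDstOptimal;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  barrier.image               = image;
  barrier.subresourceRange    = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );

  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
                       {},        // no dependency flags
                       nullptr,   // no global memory barriers
                       nullptr,   // no buffer barriers
                       barrier );
}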
+ imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginQuery( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( queryPool ), + query ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyQueryPoolResults( m_commandBuffer, + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants( m_commandBuffer, + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast( values.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * 
pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteCommands( + m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::executeCommands( ArrayProxy const & commandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteCommands( + m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t apiVersion; + Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindBufferMemory2( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + 
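// --- Editor's note: sketch of push constants and render-pass bracketing with the
// wrappers above; the ArrayProxy overload of pushConstants derives the byte size from the
// element type. PushData, 'cmd', 'layout' and 'renderPassBegin' are hypothetical.
struct PushData
{
  float scale[2];
  float offset[2];
};

void renderPassSketch( vk::CommandBuffer cmd, vk::PipelineLayout layout,
                       const vk::RenderPassBeginInfo & renderPassBegin, const PushData & data )
{
  cmd.beginRenderPass( renderPassBegin, vk::SubpassContents::eInline );
  cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, 0, data );
  // ... bind pipeline / draw ...
  cmd.endRenderPass();
}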
Device::bindBufferMemory2( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindBufferMemory2( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindImageMemory2( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + } + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } + + template < + typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( + PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + } + return createResultValue( + result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( 
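// --- Editor's note: sketch only. The enhanced enumeratePhysicalDeviceGroups wrapper above
// hides the usual two-call pattern (query the count, then fill the array) and retries
// while the driver returns eIncomplete. 'instance' is a hypothetical handle.
void deviceGroupSketch( vk::Instance instance )
{
  std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  for ( const vk::PhysicalDeviceGroupProperties & group : groups )
  {
    // physicalDeviceCount / physicalDevices describe one device group.
    (void)group.physicalDeviceCount;
  }
}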
pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
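// --- Editor's note: sketch of the StructureChain overloads above, which fill a chained
// output struct in one call. MemoryDedicatedRequirements extends MemoryRequirements2, so
// both are populated together. 'device' and 'buffer' are hypothetical.
void requirementsSketch( vk::Device device, vk::Buffer buffer )
{
  vk::BufferMemoryRequirementsInfo2 info( buffer );
  auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2,
                                                   vk::MemoryDedicatedRequirements>( info );

  const vk::MemoryRequirements2 &         reqs      = chain.get<vk::MemoryRequirements2>();
  const vk::MemoryDedicatedRequirements & dedicated = chain.get<vk::MemoryDedicatedRequirements>();
  (void)reqs;
  (void)dedicated;
}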
d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, + reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector queueFamilyProperties( + queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount, + structureChainAllocator ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + 
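// --- Editor's note: sketch of the VK_VERSION_1_1 "2" query wrappers above. The
// StructureChain form chains PhysicalDeviceSubgroupProperties behind
// PhysicalDeviceProperties2; the vector form does the count/fill dance internally.
// 'physicalDevice' is a hypothetical handle.
void querySketch( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                             vk::PhysicalDeviceSubgroupProperties>();
  uint32_t subgroupSize = chain.get<vk::PhysicalDeviceSubgroupProperties>().subgroupSize;

  std::vector<vk::QueueFamilyProperties2> queueFamilies = physicalDevice.getQueueFamilyProperties2();
  (void)subgroupSize;
  (void)queueFamilies;
}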
uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPool( + m_device, static_cast( commandPool ), static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceQueue2( + m_device, reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue + Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue2( + m_device, reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + return queue; + } +#endif 
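// A minimal usage sketch for Device::getQueue2 above, assuming `device` and
// `queueFamilyIndex` come from the usual instance/device creation; the names
// are illustrative.
#include <vulkan/vulkan.hpp>

vk::Queue fetchFirstQueue( vk::Device device, uint32_t queueFamilyIndex )
{
  // DeviceQueueInfo2 can additionally carry DeviceQueueCreateFlags (e.g. for
  // protected queues), which the original vkGetDeviceQueue cannot express.
  vk::DeviceQueueInfo2 queueInfo;
  queueInfo.queueFamilyIndex = queueFamilyIndex;
  queueInfo.queueIndex       = 0;
  return device.getQueue2( queueInfo );
}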
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerYcbcrConversionUnique( + const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( + d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorUpdateTemplateUnique( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( 
descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplate( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplate( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferProperties( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalFenceProperties( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRenderPass2( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); + } + +# ifndef 
VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } 
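// A minimal usage sketch for the *2 render pass wrappers above, assuming a
// command buffer `cmd` in the recording state and a filled-in
// vk::RenderPassBeginInfo; both are assumptions of the example, not values
// taken from this header.
#include <vulkan/vulkan.hpp>

void recordSingleSubpass( vk::CommandBuffer cmd, vk::RenderPassBeginInfo const & beginInfo )
{
  // The *2 entry points take extensible structures instead of loose parameters.
  vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  vk::SubpassEndInfo   subpassEnd;

  cmd.beginRenderPass2( beginInfo, subpassBegin );
  // ... draw calls for subpass 0 ...
  cmd.endRenderPass2( subpassEnd );
}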
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( + VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), pValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint64_t value; + Result result = + static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), &value ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkWaitSemaphores( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( + const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( + const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddress( + m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddress( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_3 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getToolProperties( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceToolProperties( + m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector toolProperties; + uint32_t toolCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( d.vkGetPhysicalDeviceToolProperties( + m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( toolCount <= 
toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + } + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + } + + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector toolProperties( + physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( d.vkGetPhysicalDeviceToolProperties( + m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + } + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + Result result = static_cast( + d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + Result result = static_cast( + d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, 
privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetPrivateData( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkSetPrivateData( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateData( m_device, + static_cast( objectType ), + 
objectHandle, + static_cast( privateDataSlot ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint64_t data; + d.vkGetPrivateData( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + &data ); + return data; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2( + m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2( + m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2( + m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
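// A minimal usage sketch for the synchronization2 wrappers above
// (setEvent2 / waitEvents2 / pipelineBarrier2), assuming a command buffer
// `cmd` in the recording state and an existing `image`; all names here are
// illustrative.
#include <vulkan/vulkan.hpp>

void transitionToTransferDst( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier2 barrier;
  barrier.srcStageMask     = vk::PipelineStageFlagBits2::eTopOfPipe;
  barrier.dstStageMask     = vk::PipelineStageFlagBits2::eTransfer;
  barrier.dstAccessMask    = vk::AccessFlagBits2::eTransferWrite;
  barrier.oldLayout        = vk::ImageLayout::eUndefined;
  barrier.newLayout        = vk::ImageLayout::eTransferDstOptimal;
  barrier.image            = image;
  barrier.subresourceRange = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );

  vk::DependencyInfo dependencyInfo;
  dependencyInfo.imageMemoryBarrierCount = 1;
  dependencyInfo.pImageMemoryBarriers    = &barrier;

  cmd.pipelineBarrier2( dependencyInfo );
}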
template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp2( + m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueSubmit2( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::submit2( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkQueueSubmit2( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast( pCopyBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast( ©BufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast( pCopyImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast( ©ImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2( m_commandBuffer, + reinterpret_cast( pCopyBufferToImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2( m_commandBuffer, + reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2( m_commandBuffer, + reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2( m_commandBuffer, + reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullMode( m_commandBuffer, static_cast( cullMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFace( m_commandBuffer, static_cast( frontFace ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast( primitiveTopology ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCount( ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCount( + m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCount( ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCount( + m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers2( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ), + 
reinterpret_cast( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast( depthTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast( depthWriteEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast( depthCompareOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast( stencilTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOp( m_commandBuffer, + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast( rasterizerDiscardEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast( depthBiasEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast( primitiveRestartEnable ) ); + } + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirements( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirements( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetDeviceBufferMemoryRequirements( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirements( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirements( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetDeviceImageMemoryRequirements( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( 
sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_surface === + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( + m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( pSupported ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Bool32 supported; + Result result = + static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( &supported ) ) ); + return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( 
d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector surfaceFormats( surfaceFormatKHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, + static_cast( surface ), + &surfaceFormatCount, + 
reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING 
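// getSurfaceFormatsKHR and getSurfacePresentModesKHR follow vulkan.hpp's two-call enumeration
// idiom: query the count with a null pointer, resize the vector, fetch, and repeat while the
// driver reports eIncomplete (the count may change between calls), finally trimming the vector
// to the number of elements actually written. With exceptions enabled the enhanced overloads
// return the vector directly and throw a vk::SystemError on failure. A minimal sketch, assuming
// hypothetical `physicalDevice` and `surface` handles:
//   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );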
"::PhysicalDevice::getSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchain ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + pSwapchainImageCount, + reinterpret_cast( pSwapchainImages ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector swapchainImages; + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = + static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + } + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + ImageAllocator & imageAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector swapchainImages( imageAllocator ); + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = + static_cast( d.vkGetSwapchainImagesKHR( m_device, + static_cast( swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + } + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquireNextImageKHR( 
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireNextImageKHR( m_device, + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImageKHR( m_device, + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ) ); + return createResultValue( result, + imageIndex, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( + const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); + return createResultValue( + result, + VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + return createResultValue( + result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
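// The swapchain wrappers above list extra success codes: acquireNextImageKHR returns a
// ResultValue<uint32_t> in which eTimeout, eNotReady and eSuboptimalKHR are reported as
// results rather than thrown, and Queue::presentKHR likewise passes eSuboptimalKHR through;
// genuine errors such as eErrorOutOfDateKHR still become exceptions when exceptions are
// enabled. A minimal sketch, assuming hypothetical `device`, `swapchain` and `imageAvailable`:
//   vk::ResultValue<uint32_t> acquire = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, {} );
//   if ( acquire.result == vk::Result::eSuboptimalKHR )
//     swapchainNeedsRecreation = true;  // hypothetical flag, acted on after presenting
//   uint32_t imageIndex = acquire.value;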
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, + static_cast( surface ), + reinterpret_cast( pModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( + d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, + static_cast( surface ), + reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector rects; + uint32_t rectCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + } + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Rect2DAllocator & rect2DAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector rects( rect2DAllocator ); + uint32_t rectCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, + static_cast( surface ), + &rectCount, + 
reinterpret_cast( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + } + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireNextImage2KHR( + m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImage2KHR( + m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); + return createResultValue( result, + imageIndex, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
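// Each vector-returning query, such as getDisplayPropertiesKHR here, has a second overload that
// takes a caller-supplied allocator for the result vector (enabled only when the allocator's
// value_type matches the element type), so callers can route these allocations through a pool
// or pmr-style allocator; the enumeration behaviour is otherwise identical.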
VK_HEADER_VERSION ); + std::vector properties( displayPropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( + DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + displayPlanePropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) 
+ { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector displays; + uint32_t displayCount; + Result result; + do + { + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + } + return createResultValue( + result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + DisplayKHRAllocator & displayKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector displays( displayKHRAllocator ); + uint32_t displayCount; + Result result; + do + { + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + } + return createResultValue( + result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( 
display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + displayModePropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMode ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + 
Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, + static_cast( mode ), + planeIndex, + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + Result result = static_cast( + d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, + static_cast( mode ), + planeIndex, + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createDisplayPlaneSurfaceKHR( const 
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_display_swapchain === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + swapchainCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSharedSwapchainsKHR( + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector swapchains( createInfos.size() ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSharedSwapchainsKHR( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector swapchains( createInfos.size(), + swapchainKHRAllocator ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + return 
createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains; + std::vector swapchains( createInfos.size() ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueSwapchains.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchains[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); + std::vector swapchains( createInfos.size() ); + Result result = static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + uniqueSwapchains.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchains[i], deleter ) ); + } + } + return createResultValue( + result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + SwapchainKHR swapchain; + Result result = 
static_cast( + d.vkCreateSharedSwapchainsKHR( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + } +# endif 
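// The *Unique factories above wrap the created handle in a vk::UniqueHandle whose ObjectDestroy
// deleter remembers the owning Instance/Device, allocator and dispatcher, so the surface or
// swapchain is destroyed automatically when the unique handle leaves scope. The platform-specific
// surface factories are only compiled when the matching VK_USE_PLATFORM_* macro is defined.
// A minimal sketch, assuming VK_USE_PLATFORM_XLIB_KHR plus hypothetical `instance` and
// `xlibCreateInfo`:
//   vk::UniqueSurfaceKHR surface = instance.createXlibSurfaceKHRUnique( xlibCreateInfo );
//   // vkDestroySurfaceKHR runs when `surface` is destroyed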
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( + uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( + uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + 
reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + 
d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( + uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCallback ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + Result result = static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + return createResultValue( + result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugReportCallbackEXTUnique( + const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + Result result = static_cast( + d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( m_instance, + static_cast( callback ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
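Unlike the WSI surface entry points, vkCreateDebugReportCallbackEXT is not exported by the Vulkan loader, so the VK_EXT_debug_report wrappers above are usually called with an explicit dispatcher via the trailing `Dispatch const & d` parameter that every function in this header carries. A small sketch of registering a report callback with the create/Unique wrappers; the callback body, the `dld` dispatcher and the function names are illustrative assumptions, not part of the patch.

// Illustrative sketch: hooking up VK_EXT_debug_report through the wrappers above.
#include <cstdio>
#include <vulkan/vulkan.hpp>

static VKAPI_ATTR VkBool32 VKAPI_CALL ReportCallback( VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT,
                                                      uint64_t, size_t, int32_t,
                                                      const char * layerPrefix, const char * message, void * )
{
  std::fprintf( stderr, "[%s] %s\n", layerPrefix, message );
  return VK_FALSE;  // do not abort the call that triggered the report
}

// The dispatcher must outlive the returned handle: the ObjectDestroy deleter keeps a pointer to it.
vk::UniqueHandle<vk::DebugReportCallbackEXT, vk::DispatchLoaderDynamic>
RegisterDebugReport( vk::Instance instance, vk::DispatchLoaderDynamic const & dld )
{
  vk::DebugReportCallbackCreateInfoEXT createInfo(
    vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &ReportCallback );
  return instance.createDebugReportCallbackEXTUnique( createInfo, nullptr, dld );
}

The dispatcher itself would be built once after instance creation, e.g. vk::DispatchLoaderDynamic dld( instance, vkGetInstanceProcAddr ), which resolves the extension's entry points at runtime.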
template + VULKAN_HPP_INLINE void + Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( m_instance, + static_cast( callback ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_marker === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDebugMarkerSetObjectNameEXT( + m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( + m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, + reinterpret_cast( pVideoProfile ), + reinterpret_cast( pCapabilities ) ) ); + } + +# 
ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + Result result = static_cast( + d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = + structureChain.template get(); + Result result = static_cast( + d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( pVideoFormatInfo ), + pVideoFormatPropertyCount, + reinterpret_cast( pVideoFormatProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + } + return createResultValue( + result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getVideoFormatPropertiesKHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector videoFormatProperties( + videoFormatPropertiesKHRAllocator ); + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + } + return createResultValue( + result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSession ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + return createResultValue( result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + 
reinterpret_cast( &videoSession ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( m_device, + static_cast( videoSession ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, + static_cast( videoSession ), + pVideoSessionMemoryRequirementsCount, + reinterpret_cast( pVideoSessionMemoryRequirements ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector videoSessionMemoryRequirements; + uint32_t videoSessionMemoryRequirementsCount; + Result result; + do + { + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, static_cast( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + 
m_device, + static_cast( videoSession ), + &videoSessionMemoryRequirementsCount, + reinterpret_cast( videoSessionMemoryRequirements.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() ); + if ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + } + } + return createResultValue( result, + videoSessionMemoryRequirements, + VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); + } + + template < + typename VideoGetMemoryPropertiesKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector videoSessionMemoryRequirements( + videoGetMemoryPropertiesKHRAllocator ); + uint32_t videoSessionMemoryRequirementsCount; + Result result; + do + { + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, static_cast( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + result = static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( + m_device, + static_cast( videoSession ), + &videoSessionMemoryRequirementsCount, + reinterpret_cast( videoSessionMemoryRequirements.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() ); + if ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + } + } + return createResultValue( result, + videoSessionMemoryRequirements, + VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindVideoSessionMemoryKHR( m_device, + static_cast( videoSession ), + videoSessionBindMemoryCount, + reinterpret_cast( pVideoSessionBindMemories ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + ArrayProxy const & videoSessionBindMemories, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindVideoSessionMemoryKHR( + m_device, + static_cast( videoSession ), + 
videoSessionBindMemories.size(), + reinterpret_cast( videoSessionBindMemories.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSessionParameters ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createVideoSessionParametersKHR( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + return createResultValue( + result, videoSessionParameters, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createVideoSessionParametersKHRUnique( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + videoSessionParameters, + VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkUpdateVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pUpdateInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::updateVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const 
VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkUpdateVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( pBeginInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( &beginInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndVideoCodingKHR( m_commandBuffer, 
reinterpret_cast( pEndCodingInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( &endCodingInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdControlVideoCodingKHR( m_commandBuffer, + reinterpret_cast( pCodingControlInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdControlVideoCodingKHR( m_commandBuffer, + reinterpret_cast( &codingControlInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + template + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( pFrameInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( &frameInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + 
"::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( + uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( + uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != 
counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginQueryIndexedEXT( + m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); + } + + //=== VK_NVX_binary_import === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + 
static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( pLaunchInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( &launchInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NVX_image_view_handle === + + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t 
Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + Result result = static_cast( + d.vkGetImageViewAddressNVX( m_device, + static_cast( imageView ), + reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_draw_indirect_count === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_AMD_shader_info === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + pInfoSize, + pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector info; + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + } + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector info( uint8_tAllocator ); + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + } + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_dynamic_rendering === + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderingKHR( m_commandBuffer ); + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === 
+ + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createStreamDescriptorSurfaceGGPUnique( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( pExternalImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format 
format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + return createResultValue( result, + externalImageFormatProperties, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + HANDLE handle; + Result result = + static_cast( d.vkGetMemoryWin32HandleNV( m_device, + static_cast( memory ), + static_cast( handleType ), + &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( 
m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, + reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + 
reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + pQueueFamilyPropertyCount, + reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector queueFamilyProperties( + queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + 
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount, + structureChainAllocator ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( + m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_device_group === + + template + VULKAN_HPP_INLINE void + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( 
uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBaseKHR( + m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + template + VULKAN_HPP_INLINE void 
Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPoolKHR( + m_device, static_cast( commandPool ), static_cast( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + } + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } + + template < + typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( + PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = + static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + 
physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + } + return createResultValue( result, + physicalDeviceGroupProperties, + VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_memory_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryWin32HandlePropertiesKHR( 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( + m_device, + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + return createResultValue( + result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + int fd; + Result result = static_cast( + d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( pMemoryFdProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast( + d.vkGetMemoryFdPropertiesKHR( m_device, + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( + result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_semaphore_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalSemaphoreInfo ), + reinterpret_cast( 
pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + HANDLE handle; + Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( 
d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkImportSemaphoreFdKHR( + m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + int fd; + Result result = static_cast( + d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_push_descriptor === + + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_conditional_rendering === + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginConditionalRenderingEXT( + m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createDescriptorUpdateTemplateKHR( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( + result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createDescriptorUpdateTemplateKHRUnique( + 
const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplateKHR( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_clip_space_w_scaling === + + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pViewportWScalings ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( + uint32_t firstViewport, + 
ArrayProxy const & viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingNV( m_commandBuffer, + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( + Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, 
RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_display_control === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkDisplayPowerControlEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayPowerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkDisplayPowerControlEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayPowerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( pDeviceEventInfo ), + 
reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( pDisplayEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( + d.vkRegisterDisplayEventEXT( m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( + static_cast( allocator ) ), + 
reinterpret_cast( &fence ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + pCounterValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint64_t counterValue; + Result result = + static_cast( d.vkGetSwapchainCounterEXT( m_device, + static_cast( swapchain ), + static_cast( counter ), + &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_GOOGLE_display_timing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, + static_cast( swapchain ), + reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( + result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + pPresentationTimingCount, + reinterpret_cast( pPresentationTimings ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( 
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector presentationTimings; + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + } + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + } + + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector presentationTimings( + pastPresentationTimingGOOGLEAllocator ); + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( + m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + } + return createResultValue( + result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_discard_rectangles === + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangleCount, + reinterpret_cast( pDiscardRectangles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_hdr_metadata === + + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetHdrMetadataEXT( m_device, + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_create_renderpass2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, 
renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2KHR( m_commandBuffer, + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_shared_presentable_image === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch 
const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_fence_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( pExternalFenceInfo ), + reinterpret_cast( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( + m_physicalDevice, + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkImportFenceWin32HandleKHR( + m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFenceWin32HandleKHR( + m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getFenceWin32HandleKHR( const 
VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + HANDLE handle; + Result result = static_cast( d.vkGetFenceWin32HandleKHR( + m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetFenceFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + int fd; + Result result = static_cast( + d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + pCounterCount, + reinterpret_cast( pCounters ), + reinterpret_cast( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::pair, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + std::pair, + std::vector> + data; + std::vector & counters = data.first; + std::vector & counterDescriptions = + data.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::pair, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::pair, + std::vector> + data( std::piecewise_construct, + std::forward_as_tuple( performanceCounterKHRAllocator ), + std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); + std::vector & counters = data.first; + std::vector & counterDescriptions = + data.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == Result::eIncomplete ); + if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint32_t numPasses; + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); + return numPasses; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( + const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkReleaseProfilingLockKHR( m_device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + 
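+    // This overload returns its results through a StructureChain, so extended
+    // capability structures can be queried via the pNext chain in a single call.
+    // Illustrative usage sketch only (not part of the generated header); it
+    // assumes the default exceptions-enabled configuration, caller-owned
+    // 'physicalDevice' and 'surface' handles, and an implementation that
+    // supports VK_KHR_shared_presentable_image:
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo;
+    //   surfaceInfo.surface = surface;
+    //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
+    //     VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR,
+    //     VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>( surfaceInfo );
+    //   auto & caps = chain.get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>(); +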
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = + structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector surfaceFormats( surfaceFormat2KHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < 
surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + } + return createResultValue( + result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, pPropertyCount, reinterpret_cast( 
pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( + DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + displayPlaneProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + 
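+        // Standard Vulkan "two-call" enumeration: the first call (null data
+        // pointer) only queries the element count, the vector is then sized to
+        // that count and the data is fetched with a second call. The enclosing
+        // do/while retries while the result is eIncomplete in case the count
+        // changed between the two calls, and the vector is shrunk afterwards if
+        // fewer elements were actually written. Callers of this enhanced-mode
+        // wrapper just write, e.g. (illustrative sketch; 'physicalDevice' and
+        // 'display' are caller-owned, default exceptions-enabled configuration
+        // assumed):
+        //   auto modes = physicalDevice.getDisplayModeProperties2KHR( display ); +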
properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, + static_cast( display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + Result result = static_cast( + d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + return createResultValue( + result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const 
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetDebugUtilsObjectNameEXT( + m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( + m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
+                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkCreateDebugUtilsMessengerEXT( m_instance,
+                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
+                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type +
Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + Result result = static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + Result result = static_cast( + d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT 
messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( pCallbackData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( + const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, buffer, reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, &buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + m_device, &buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( + result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( pInfo ), pBuffer ) ); + } + +# 
ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + struct AHardwareBuffer * buffer; + Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + m_device, reinterpret_cast( &info ), &buffer ) ); + return createResultValue( + result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sample_locations === + + template + VULKAN_HPP_INLINE void + CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( pSampleLocationsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, + reinterpret_cast( &sampleLocationsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( pMultisampleProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); + return multisampleProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_memory_requirements2 === + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
+ { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements; + 
uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_acceleration_structure === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateAccelerationStructureKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( + result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createAccelerationStructureKHRUnique( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + accelerationStructure, + VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == 
pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresIndirectKHR( + m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( + m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBuildAccelerationStructuresKHR( + m_device, + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
template + VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = static_cast( d.vkBuildAccelerationStructuresKHR( + m_device, + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyAccelerationStructureKHR( m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkCopyAccelerationStructureKHR( m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + 
m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy const & data, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + DataType data; + Result result = static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( + m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, + reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( + m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( + m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( + m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( + m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetAccelerationStructureDeviceAddressKHR( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdWriteAccelerationStructuresPropertiesKHR( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( + m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + return compatibility; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureBuildSizesKHR( + m_device, + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( + m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + return sizeInfo; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_sampler_ycbcr_conversion === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversionKHR( 
const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateSamplerYcbcrConversionKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createSamplerYcbcrConversionKHRUnique( + const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversionKHR( m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_bind_memory2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return static_cast( d.vkBindBufferMemory2KHR( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindBufferMemory2KHR( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindImageMemory2KHR( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindImageMemory2KHR( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_image_drm_format_modifier === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, + static_cast( image ), + reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + 
reinterpret_cast( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + Result result = static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + Result result = static_cast( + d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( m_device, + static_cast( validationCache ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkMergeValidationCachesEXT( m_device, + static_cast( dstCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( + m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast( d.vkGetValidationCacheDataEXT( m_device, + static_cast( validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + 
{ + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_shading_rate_image === + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadingRateImageNV( + m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, + firstViewport, + viewportCount, + reinterpret_cast( pShadingRatePalettes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateAccelerationStructureNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + return createResultValue( + result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createAccelerationStructureNVUnique( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + accelerationStructure, + VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( m_device, + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = + structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysNV( m_commandBuffer, + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( + result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( + result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + DataType data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + ArrayProxy const & data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + DataType data; + Result result = static_cast( + d.vkGetAccelerationStructureHandleNV( m_device, + static_cast( accelerationStructure ), + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( + m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::compileDeferredNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + template + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_draw_indirect_count === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_EXT_external_memory_host === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( + m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + return createResultValue( + result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_buffer_marker === + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( 
timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + } + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector timeDomains( timeDomainEXTAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + } + return createResultValue( + result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::pair, uint64_t> data( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data.first; + uint64_t & maxDeviation = data.second; + Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::pair, uint64_t> data( + std::piecewise_construct, + std::forward_as_tuple( timestampInfos.size(), 
uint64_tAllocator ),
+      std::forward_as_tuple( 0 ) );
+    std::vector<uint64_t, Uint64_tAllocator> & timestamps   = data.first;
+    uint64_t &                                 maxDeviation = data.second;
+    Result result = static_cast<Result>(
+      d.vkGetCalibratedTimestampsEXT( m_device,
+                                      timestampInfos.size(),
+                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
+                                      timestamps.data(),
+                                      &maxDeviation ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+    typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
+    Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::pair<uint64_t, uint64_t> data;
+    uint64_t &                    timestamp    = data.first;
+    uint64_t &                    maxDeviation = data.second;
+    Result result = static_cast<Result>(
+      d.vkGetCalibratedTimestampsEXT( m_device,
+                                      1,
+                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ),
+                                      &timestamp,
+                                      &maxDeviation ) );
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_NV_mesh_shader ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount,
+                                                         uint32_t firstTask,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                 uint32_t drawCount,
+                                                                 uint32_t stride,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectNV(
+      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                 VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                 uint32_t maxDrawCount,
+                                                 uint32_t stride,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
+                                         static_cast<VkBuffer>( buffer ),
+                                         static_cast<VkDeviceSize>( offset ),
+                                         static_cast<VkBuffer>( countBuffer ),
+                                         static_cast<VkDeviceSize>( countBufferOffset ),
+                                         maxDrawCount,
+                                         stride );
+  }
+
+  //=== VK_NV_scissor_exclusive ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                                               uint32_t exclusiveScissorCount,
+                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
+                                  firstExclusiveScissor,
+                                  exclusiveScissorCount,
+                                  reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
+                                  firstExclusiveScissor,
+                                  exclusiveScissors.size(),
+                                  reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointMarkerType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetQueueCheckpointDataNV(
+      m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointDataNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+    uint32_t                                                 checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV(
+      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+
+  template <typename CheckpointDataNVAllocator,
+            typename Dispatch,
+            typename B,
+            typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+    uint32_t                                                 checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV(
+      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_timeline_semaphore ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    uint64_t value;
+    Result   result =
+      static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_INTEL_performance_query === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkInitializePerformanceApiINTEL( + m_device, reinterpret_cast( pInitializeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkInitializePerformanceApiINTEL( + m_device, reinterpret_cast( &initializeInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUninitializePerformanceApiINTEL( m_device ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( + const 
VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceMarkerINTEL( + m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkCmdSetPerformanceMarkerINTEL( + m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( + m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( + m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceOverrideINTEL( + m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkCmdSetPerformanceOverrideINTEL( + m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( 
d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + return createResultValue( + result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::acquirePerformanceConfigurationINTELUnique( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + Result result = static_cast( d.vkAcquirePerformanceConfigurationINTEL( + m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, + configuration, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
+ Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( + m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( + m_queue, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( + m_queue, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPerformanceParameterINTEL( m_device, + static_cast( parameter ), + reinterpret_cast( pValue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + Result result = + static_cast( d.vkGetPerformanceParameterINTEL( m_device, + static_cast( parameter ), + reinterpret_cast( &value ) ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_display_native_hdr === + + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetLocalDimmingAMD( + m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + 
reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createImagePipeSurfaceFUCHSIAUnique( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result 
result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( + uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, + pFragmentShadingRateCount, + reinterpret_cast( pFragmentShadingRates ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector + fragmentShadingRates; + uint32_t fragmentShadingRateCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + } + return createResultValue( + result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getFragmentShadingRatesKHR( + PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector + fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator ); + uint32_t fragmentShadingRateCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) 
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+      }
+    }
+    return createResultValue(
+      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
+    const VULKAN_HPP_NAMESPACE::Extent2D *                       pFragmentSize,
+    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
+                                      reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
+                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
+    const VULKAN_HPP_NAMESPACE::Extent2D &                       fragmentSize,
+    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
+                                      reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
+                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_buffer_device_address ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
+    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>(
+      d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
+    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_tooling_info ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getToolPropertiesEXT( uint32_t *                                           pToolCount,
+                                          VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+      m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+    PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+    uint32_t toolCount;
+    Result   result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
+          m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      if ( toolCount < toolProperties.size() )
+      {
+        toolProperties.resize( toolCount );
+      }
+    }
+
return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + } + + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector toolProperties( + physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + } + return createResultValue( + result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_present_wait === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_cooperative_matrix === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( 
m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } + + template < + typename CooperativeMatrixPropertiesNVAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( + CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + cooperativeMatrixPropertiesNVAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_coverage_reduction_mode === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + pCombinationCount, + reinterpret_cast( pCombinations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector combinations; + uint32_t combinationCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == 
Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + } + return createResultValue( result, + combinations, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector combinations( + framebufferMixedSamplesCombinationNVAllocator ); + uint32_t combinationCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + } + return createResultValue( result, + combinations, + VULKAN_HPP_NAMESPACE_STRING + "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( pSurfaceInfo ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( 
presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + } + return createResultValue( + result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return createResultValue( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createHeadlessSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_buffer_device_address === + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, + reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( + m_device, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( + m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_line_rasterization === + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + template + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags 
cullMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCountEXT( + m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCountEXT( ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCountEXT( + m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCountEXT( ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCountEXT( + m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ), + reinterpret_cast( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef 
VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOpEXT( m_commandBuffer, + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + //=== VK_KHR_deferred_host_operations === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDeferredOperationKHR( m_device, + reinterpret_cast( pAllocator ), + reinterpret_cast( pDeferredOperation ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDeferredOperationKHR( Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( + d.vkCreateDeferredOperationKHR( m_device, + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + return createResultValue( + result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDeferredOperationKHRUnique( Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + Result result = static_cast( + d.vkCreateDeferredOperationKHR( m_device, + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( m_device, + static_cast( operation ), + 
reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, + VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_pipeline_executable_properties === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( pPipelineInfo ), + pExecutableCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties; + 
uint32_t executableCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + } + + template < + typename PipelineExecutablePropertiesKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutablePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector properties( + pipelineExecutablePropertiesKHRAllocator ); + uint32_t executableCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( + m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + } + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( pExecutableInfo ), + pStatisticCount, + reinterpret_cast( pStatistics ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector statistics; + uint32_t statisticCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) 
); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + } + return createResultValue( + result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + } + + template < + typename PipelineExecutableStatisticKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector statistics( + pipelineExecutableStatisticKHRAllocator ); + uint32_t statisticCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + } + return createResultValue( + result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( pExecutableInfo ), + pInternalRepresentationCount, + reinterpret_cast( pInternalRepresentations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector + internalRepresentations; + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) 
&& internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + } + return createResultValue( result, + internalRepresentations, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } + + template < + typename PipelineExecutableInternalRepresentationKHRAllocator, + typename Dispatch, + typename B, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector + internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + } + return createResultValue( result, + internalRepresentations, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_device_generated_commands === + + template + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsNV( + m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsNV( + m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( + VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( + VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateIndirectCommandsLayoutNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( + result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createIndirectCommandsLayoutNVUnique( + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, + indirectCommandsLayout, + VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique", + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, + static_cast( 
indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( + int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::acquireDrmDisplayEXT( + int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( display ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); + return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_private_data === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + Result result = static_cast( + d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + return createResultValue( + result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + Result result = static_cast( + d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlotEXT( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlotEXT( m_device, + static_cast( privateDataSlot ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + 
Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkSetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + uint64_t data; + d.vkGetPrivateDataEXT( m_device, + static_cast( objectType ), + objectHandle, + static_cast( privateDataSlot ), + &data ); + return data; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( pEncodeInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2KHR( + m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2KHR( + m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + 
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2KHR( + m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2KHR( m_commandBuffer, + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2KHR( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp2KHR( + m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueSubmit2KHR( + m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result 
result = static_cast( d.vkQueueSubmit2KHR( m_queue, + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + template + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointData2NV( + m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( + m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + return checkpointData; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_fragment_shading_rate_enums === + + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer, + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_KHR_copy_commands2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const 
VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( ©BufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( pCopyImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( ©ImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, + reinterpret_cast( pCopyBufferToImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, + reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, + reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, + reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage2KHR( const 
VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( + VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + Result result = static_cast( + d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + ObjectRelease deleter( *this, d ); + return createResultValue( + result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if 
defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( + uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( + uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( + 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, + pipelines, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> uniquePipelines; + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( result, + std::move( uniquePipelines ), + 
VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename B, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( size_t i = 0; i < createInfos.size(); i++ ) + { + uniquePipelines.push_back( UniqueHandle( pipelines[i], deleter ) ); + } + } + return createResultValue( result, + std::move( uniquePipelines ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Pipeline pipeline; + Result result = static_cast( + d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, + pipeline, + VULKAN_HPP_NAMESPACE_STRING + "::Device::createRayTracingPipelineKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, + deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t 
firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." ) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = + static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + DataType data; + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." 
) + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( + d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( T ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( + d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + DataType data; + Result result = + static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, + static_cast( pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + return createResultValue( + result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysIndirectKHR( + m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysIndirectKHR( + m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE DeviceSize + Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupStackSizeKHR( + m_device, static_cast( pipeline ), group, static_cast( groupShader ) ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); + } + + //=== VK_EXT_vertex_input_dynamic_state === + + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetVertexInputEXT( + m_commandBuffer, + vertexBindingDescriptionCount, + reinterpret_cast( pVertexBindingDescriptions ), + vertexAttributeDescriptionCount, + reinterpret_cast( pVertexAttributeDescriptions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetVertexInputEXT( + m_commandBuffer, + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + 
Device::getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + zx_handle_t zirconHandle; + Result result = static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + return createResultValue( + result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( + m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( pMemoryZirconHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + Result result = static_cast( d.vkGetMemoryZirconHandlePropertiesFUCHSIA( + m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + return createResultValue( result, + memoryZirconHandleProperties, + VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( pGetZirconHandleInfo ), + pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + zx_handle_t zirconHandle; + Result result = static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + m_device, + reinterpret_cast( &getZirconHandleInfo ), + &zirconHandle ) ); + return createResultValue( + result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateBufferCollectionFUCHSIA( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCollection ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + Result result = static_cast( d.vkCreateBufferCollectionFUCHSIA( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + return createResultValue( + result, collection, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::createBufferCollectionFUCHSIAUnique( + const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + Result result = static_cast( d.vkCreateBufferCollectionFUCHSIA( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, collection, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionImageConstraintsFUCHSIA( + 
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( pImageConstraintsInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setBufferCollectionImageConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( &imageConstraintsInfo ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionBufferConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( pBufferConstraintsInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setBufferCollectionBufferConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + Result result = static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( &bufferConstraintsInfo ) ) ); + return createResultValue( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( m_device, + static_cast( collection ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( m_device, + static_cast( collection ), + reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( + static_cast( allocator ) ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + Result result = static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( &properties ) ) ); + return createResultValue( + result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + m_device, static_cast( renderpass ), reinterpret_cast( pMaxWorkgroupSize ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + Result result = static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + m_device, static_cast( renderpass ), reinterpret_cast( &maxWorkgroupSize ) ) ); + return createResultValue( result, + maxWorkgroupSize, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( 
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
+  }
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView   imageView,
+                                                                  VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindInvocationMaskHUAWEI(
+      m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+  //=== VK_NV_external_memory_rdma ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryRemoteAddressNV(
+    const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
+    VULKAN_HPP_NAMESPACE::RemoteAddressNV *                    pAddress,
+    Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
+      m_device,
+      reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ),
+      reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
+    Device::getMemoryRemoteAddressNV(
+      const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
+    Result result = static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
+      m_device,
+      reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ),
+      reinterpret_cast<VkRemoteAddressNV *>( &address ) ) );
+    return createResultValue( result, address, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>(
primitiveRestartEnable ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType::type + Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + Result result = static_cast( + d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( + static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( + result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter ); + } +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( + uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( + uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdSetColorWriteEnableEXT( + m_commandBuffer, attachmentCount, reinterpret_cast( pColorWriteEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteEnableEXT( + m_commandBuffer, colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_multi_draw === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiEXT( m_commandBuffer, + drawCount, + reinterpret_cast( pVertexInfo ), + instanceCount, + firstInstance, + stride ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiEXT( ArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiEXT( m_commandBuffer, + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, + drawCount, + reinterpret_cast( pIndexInfo ), + instanceCount, + firstInstance, + stride, + pVertexOffset ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( + ArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Optional vertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + stride, + static_cast( vertexOffset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_pageable_device_local_memory === + + template + VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + float priority, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast( memory ), priority ); + } + + //=== VK_KHR_maintenance4 === + + template + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirementsKHR( 
m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetDeviceBufferMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirementsKHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + d.vkGetDeviceImageMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( 
      pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD
+    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+                                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+                                                   &sparseMemoryRequirementCount,
+                                                   nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
+      m_device,
+      reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+      &sparseMemoryRequirementCount,
+      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
+  }
+
+  template <
+    typename SparseImageMemoryRequirements2Allocator,
+    typename Dispatch,
+    typename B,
+    typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD
+    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+    Device::getImageSparseMemoryRequirementsKHR(
+      const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+      SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+      Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+      sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+                                                   &sparseMemoryRequirementCount,
+                                                   nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
+      m_device,
+      reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+      &sparseMemoryRequirementCount,
+      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    return sparseMemoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/Externals/Vulkan/Include/vulkan/vulkan_ggp.h b/Externals/Vulkan/Include/vulkan/vulkan_ggp.h
index fd306131c322..19dfd22617e9 100644
--- a/Externals/Vulkan/Include/vulkan/vulkan_ggp.h
+++ b/Externals/Vulkan/Include/vulkan/vulkan_ggp.h
@@ -2,19 +2,9 @@
 #define VULKAN_GGP_H_ 1
 
 /*
-** Copyright (c) 2015-2019 The Khronos Group Inc.
+** Copyright 2015-2022 The Khronos Group Inc.
 **
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
+** SPDX-License-Identifier: Apache-2.0
 */
 
 /*
diff --git a/Externals/Vulkan/Include/vulkan/vulkan_handles.hpp b/Externals/Vulkan/Include/vulkan/vulkan_handles.hpp
new file mode 100644
index 000000000000..aa81afc94a96
--- /dev/null
+++ b/Externals/Vulkan/Include/vulkan/vulkan_handles.hpp
@@ -0,0 +1,14971 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_HANDLES_HPP +#define VULKAN_HANDLES_HPP + +namespace VULKAN_HPP_NAMESPACE +{ + //=================================== + //=== STRUCT forward declarations === + //=================================== + + //=== VK_VERSION_1_0 === + struct Extent2D; + struct Extent3D; + struct Offset2D; + struct Offset3D; + struct Rect2D; + struct BaseInStructure; + struct BaseOutStructure; + struct BufferMemoryBarrier; + struct DispatchIndirectCommand; + struct DrawIndexedIndirectCommand; + struct DrawIndirectCommand; + struct ImageMemoryBarrier; + struct MemoryBarrier; + struct PipelineCacheHeaderVersionOne; + struct AllocationCallbacks; + struct ApplicationInfo; + struct FormatProperties; + struct ImageFormatProperties; + struct InstanceCreateInfo; + struct MemoryHeap; + struct MemoryType; + struct PhysicalDeviceFeatures; + struct PhysicalDeviceLimits; + struct PhysicalDeviceMemoryProperties; + struct PhysicalDeviceProperties; + struct PhysicalDeviceSparseProperties; + struct QueueFamilyProperties; + struct DeviceCreateInfo; + struct DeviceQueueCreateInfo; + struct ExtensionProperties; + struct LayerProperties; + struct SubmitInfo; + struct MappedMemoryRange; + struct MemoryAllocateInfo; + struct MemoryRequirements; + struct BindSparseInfo; + struct ImageSubresource; + struct SparseBufferMemoryBindInfo; + struct SparseImageFormatProperties; + struct SparseImageMemoryBind; + struct SparseImageMemoryBindInfo; + struct SparseImageMemoryRequirements; + struct SparseImageOpaqueMemoryBindInfo; + struct SparseMemoryBind; + struct FenceCreateInfo; + struct SemaphoreCreateInfo; + struct EventCreateInfo; + struct QueryPoolCreateInfo; + struct BufferCreateInfo; + struct BufferViewCreateInfo; + struct ImageCreateInfo; + struct SubresourceLayout; + struct ComponentMapping; + struct ImageSubresourceRange; + struct ImageViewCreateInfo; + struct ShaderModuleCreateInfo; + struct PipelineCacheCreateInfo; + struct ComputePipelineCreateInfo; + struct GraphicsPipelineCreateInfo; + struct PipelineColorBlendAttachmentState; + struct PipelineColorBlendStateCreateInfo; + struct PipelineDepthStencilStateCreateInfo; + struct PipelineDynamicStateCreateInfo; + struct PipelineInputAssemblyStateCreateInfo; + struct PipelineMultisampleStateCreateInfo; + struct PipelineRasterizationStateCreateInfo; + struct PipelineShaderStageCreateInfo; + struct PipelineTessellationStateCreateInfo; + struct PipelineVertexInputStateCreateInfo; + struct PipelineViewportStateCreateInfo; + struct SpecializationInfo; + struct SpecializationMapEntry; + struct StencilOpState; + struct VertexInputAttributeDescription; + struct VertexInputBindingDescription; + struct Viewport; + struct PipelineLayoutCreateInfo; + struct PushConstantRange; + struct SamplerCreateInfo; + struct CopyDescriptorSet; + struct DescriptorBufferInfo; + struct DescriptorImageInfo; + struct DescriptorPoolCreateInfo; + struct DescriptorPoolSize; + struct DescriptorSetAllocateInfo; + struct DescriptorSetLayoutBinding; + struct DescriptorSetLayoutCreateInfo; + struct WriteDescriptorSet; + struct AttachmentDescription; + struct AttachmentReference; + struct FramebufferCreateInfo; + struct RenderPassCreateInfo; + struct SubpassDependency; + struct SubpassDescription; + struct CommandPoolCreateInfo; + struct CommandBufferAllocateInfo; + struct CommandBufferBeginInfo; + struct CommandBufferInheritanceInfo; + struct BufferCopy; + struct BufferImageCopy; + 
struct ClearAttachment; + union ClearColorValue; + struct ClearDepthStencilValue; + struct ClearRect; + union ClearValue; + struct ImageBlit; + struct ImageCopy; + struct ImageResolve; + struct ImageSubresourceLayers; + struct RenderPassBeginInfo; + + //=== VK_VERSION_1_1 === + struct PhysicalDeviceSubgroupProperties; + struct BindBufferMemoryInfo; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + struct BindImageMemoryInfo; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + struct PhysicalDevice16BitStorageFeatures; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + struct MemoryDedicatedRequirements; + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + struct MemoryDedicatedAllocateInfo; + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + struct MemoryAllocateFlagsInfo; + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + struct DeviceGroupRenderPassBeginInfo; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + struct DeviceGroupCommandBufferBeginInfo; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + struct DeviceGroupSubmitInfo; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + struct DeviceGroupBindSparseInfo; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + struct BindBufferMemoryDeviceGroupInfo; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + struct BindImageMemoryDeviceGroupInfo; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + struct PhysicalDeviceGroupProperties; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct DeviceGroupDeviceCreateInfo; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + struct BufferMemoryRequirementsInfo2; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + struct ImageMemoryRequirementsInfo2; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + struct ImageSparseMemoryRequirementsInfo2; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + struct MemoryRequirements2; + using MemoryRequirements2KHR = MemoryRequirements2; + struct SparseImageMemoryRequirements2; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + struct PhysicalDeviceFeatures2; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + struct PhysicalDeviceProperties2; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + struct FormatProperties2; + using FormatProperties2KHR = FormatProperties2; + struct ImageFormatProperties2; + using ImageFormatProperties2KHR = ImageFormatProperties2; + struct PhysicalDeviceImageFormatInfo2; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + struct QueueFamilyProperties2; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + struct PhysicalDeviceMemoryProperties2; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + struct SparseImageFormatProperties2; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + struct PhysicalDeviceSparseImageFormatInfo2; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + struct PhysicalDevicePointClippingProperties; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + struct RenderPassInputAttachmentAspectCreateInfo; + using 
RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + struct InputAttachmentAspectReference; + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + struct ImageViewUsageCreateInfo; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + struct PipelineTessellationDomainOriginStateCreateInfo; + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + struct RenderPassMultiviewCreateInfo; + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + struct PhysicalDeviceMultiviewFeatures; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + struct PhysicalDeviceMultiviewProperties; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + struct PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + struct PhysicalDeviceProtectedMemoryFeatures; + struct PhysicalDeviceProtectedMemoryProperties; + struct DeviceQueueInfo2; + struct ProtectedSubmitInfo; + struct SamplerYcbcrConversionCreateInfo; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + struct SamplerYcbcrConversionInfo; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + struct BindImagePlaneMemoryInfo; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct ImagePlaneMemoryRequirementsInfo; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + struct PhysicalDeviceSamplerYcbcrConversionFeatures; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + struct SamplerYcbcrConversionImageFormatProperties; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + struct DescriptorUpdateTemplateEntry; + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + struct DescriptorUpdateTemplateCreateInfo; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + struct ExternalMemoryProperties; + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + struct PhysicalDeviceExternalImageFormatInfo; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + struct ExternalImageFormatProperties; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + struct PhysicalDeviceExternalBufferInfo; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + struct ExternalBufferProperties; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + struct PhysicalDeviceIDProperties; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + struct ExternalMemoryImageCreateInfo; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + struct ExternalMemoryBufferCreateInfo; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + struct ExportMemoryAllocateInfo; + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + struct PhysicalDeviceExternalFenceInfo; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + struct ExternalFenceProperties; + using ExternalFencePropertiesKHR = ExternalFenceProperties; + struct 
ExportFenceCreateInfo; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + struct ExportSemaphoreCreateInfo; + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + struct PhysicalDeviceExternalSemaphoreInfo; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + struct ExternalSemaphoreProperties; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + struct PhysicalDeviceMaintenance3Properties; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + struct DescriptorSetLayoutSupport; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + struct PhysicalDeviceShaderDrawParametersFeatures; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + //=== VK_VERSION_1_2 === + struct PhysicalDeviceVulkan11Features; + struct PhysicalDeviceVulkan11Properties; + struct PhysicalDeviceVulkan12Features; + struct PhysicalDeviceVulkan12Properties; + struct ImageFormatListCreateInfo; + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + struct RenderPassCreateInfo2; + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + struct AttachmentDescription2; + using AttachmentDescription2KHR = AttachmentDescription2; + struct AttachmentReference2; + using AttachmentReference2KHR = AttachmentReference2; + struct SubpassDescription2; + using SubpassDescription2KHR = SubpassDescription2; + struct SubpassDependency2; + using SubpassDependency2KHR = SubpassDependency2; + struct SubpassBeginInfo; + using SubpassBeginInfoKHR = SubpassBeginInfo; + struct SubpassEndInfo; + using SubpassEndInfoKHR = SubpassEndInfo; + struct PhysicalDevice8BitStorageFeatures; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + struct ConformanceVersion; + using ConformanceVersionKHR = ConformanceVersion; + struct PhysicalDeviceDriverProperties; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + struct PhysicalDeviceShaderAtomicInt64Features; + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + struct PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + struct PhysicalDeviceFloatControlsProperties; + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + struct DescriptorSetLayoutBindingFlagsCreateInfo; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + struct PhysicalDeviceDescriptorIndexingFeatures; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + struct PhysicalDeviceDescriptorIndexingProperties; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + struct DescriptorSetVariableDescriptorCountAllocateInfo; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + struct DescriptorSetVariableDescriptorCountLayoutSupport; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + struct SubpassDescriptionDepthStencilResolve; + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + struct PhysicalDeviceDepthStencilResolveProperties; + using 
PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + struct PhysicalDeviceScalarBlockLayoutFeatures; + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + struct ImageStencilUsageCreateInfo; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + struct SamplerReductionModeCreateInfo; + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; + struct PhysicalDeviceSamplerFilterMinmaxProperties; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + struct PhysicalDeviceVulkanMemoryModelFeatures; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + struct PhysicalDeviceImagelessFramebufferFeatures; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + struct FramebufferAttachmentsCreateInfo; + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + struct FramebufferAttachmentImageInfo; + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + struct RenderPassAttachmentBeginInfo; + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + struct PhysicalDeviceUniformBufferStandardLayoutFeatures; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + struct AttachmentReferenceStencilLayout; + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; + struct AttachmentDescriptionStencilLayout; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + struct PhysicalDeviceHostQueryResetFeatures; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + struct PhysicalDeviceTimelineSemaphoreFeatures; + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + struct PhysicalDeviceTimelineSemaphoreProperties; + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + struct SemaphoreTypeCreateInfo; + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + struct TimelineSemaphoreSubmitInfo; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + struct SemaphoreWaitInfo; + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + struct SemaphoreSignalInfo; + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + struct PhysicalDeviceBufferDeviceAddressFeatures; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + struct BufferDeviceAddressInfo; + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + struct BufferOpaqueCaptureAddressCreateInfo; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + struct MemoryOpaqueCaptureAddressAllocateInfo; + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + struct DeviceMemoryOpaqueCaptureAddressInfo; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = 
DeviceMemoryOpaqueCaptureAddressInfo; + + //=== VK_VERSION_1_3 === + struct PhysicalDeviceVulkan13Features; + struct PhysicalDeviceVulkan13Properties; + struct PipelineCreationFeedbackCreateInfo; + using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; + struct PipelineCreationFeedback; + using PipelineCreationFeedbackEXT = PipelineCreationFeedback; + struct PhysicalDeviceShaderTerminateInvocationFeatures; + using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + struct PhysicalDeviceToolProperties; + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + struct PhysicalDevicePrivateDataFeatures; + using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; + struct DevicePrivateDataCreateInfo; + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + struct PrivateDataSlotCreateInfo; + using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; + struct PhysicalDevicePipelineCreationCacheControlFeatures; + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + struct MemoryBarrier2; + using MemoryBarrier2KHR = MemoryBarrier2; + struct BufferMemoryBarrier2; + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; + struct ImageMemoryBarrier2; + using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; + struct DependencyInfo; + using DependencyInfoKHR = DependencyInfo; + struct SubmitInfo2; + using SubmitInfo2KHR = SubmitInfo2; + struct SemaphoreSubmitInfo; + using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; + struct CommandBufferSubmitInfo; + using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; + struct PhysicalDeviceSynchronization2Features; + using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + struct PhysicalDeviceImageRobustnessFeatures; + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + struct CopyBufferInfo2; + using CopyBufferInfo2KHR = CopyBufferInfo2; + struct CopyImageInfo2; + using CopyImageInfo2KHR = CopyImageInfo2; + struct CopyBufferToImageInfo2; + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; + struct CopyImageToBufferInfo2; + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + struct BlitImageInfo2; + using BlitImageInfo2KHR = BlitImageInfo2; + struct ResolveImageInfo2; + using ResolveImageInfo2KHR = ResolveImageInfo2; + struct BufferCopy2; + using BufferCopy2KHR = BufferCopy2; + struct ImageCopy2; + using ImageCopy2KHR = ImageCopy2; + struct ImageBlit2; + using ImageBlit2KHR = ImageBlit2; + struct BufferImageCopy2; + using BufferImageCopy2KHR = BufferImageCopy2; + struct ImageResolve2; + using ImageResolve2KHR = ImageResolve2; + struct PhysicalDeviceSubgroupSizeControlFeatures; + using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; + struct PhysicalDeviceSubgroupSizeControlProperties; + using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using 
PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + struct PhysicalDeviceInlineUniformBlockFeatures; + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + struct PhysicalDeviceInlineUniformBlockProperties; + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + struct WriteDescriptorSetInlineUniformBlock; + using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + struct DescriptorPoolInlineUniformBlockCreateInfo; + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + struct PhysicalDeviceTextureCompressionASTCHDRFeatures; + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + struct RenderingInfo; + using RenderingInfoKHR = RenderingInfo; + struct RenderingAttachmentInfo; + using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; + struct PipelineRenderingCreateInfo; + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + struct PhysicalDeviceDynamicRenderingFeatures; + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + struct CommandBufferInheritanceRenderingInfo; + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; + struct PhysicalDeviceShaderIntegerDotProductFeatures; + using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; + struct PhysicalDeviceShaderIntegerDotProductProperties; + using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties; + struct PhysicalDeviceTexelBufferAlignmentProperties; + using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + struct FormatProperties3; + using FormatProperties3KHR = FormatProperties3; + struct PhysicalDeviceMaintenance4Features; + using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; + struct PhysicalDeviceMaintenance4Properties; + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + struct DeviceBufferMemoryRequirements; + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + struct DeviceImageMemoryRequirements; + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + //=== VK_KHR_surface === + struct SurfaceCapabilitiesKHR; + struct SurfaceFormatKHR; + + //=== VK_KHR_swapchain === + struct SwapchainCreateInfoKHR; + struct PresentInfoKHR; + struct ImageSwapchainCreateInfoKHR; + struct BindImageMemorySwapchainInfoKHR; + struct AcquireNextImageInfoKHR; + struct DeviceGroupPresentCapabilitiesKHR; + struct DeviceGroupPresentInfoKHR; + struct DeviceGroupSwapchainCreateInfoKHR; + + //=== VK_KHR_display === + struct DisplayModeCreateInfoKHR; + struct DisplayModeParametersKHR; + struct DisplayModePropertiesKHR; + struct DisplayPlaneCapabilitiesKHR; + struct DisplayPlanePropertiesKHR; + struct DisplayPropertiesKHR; + struct DisplaySurfaceCreateInfoKHR; + + //=== VK_KHR_display_swapchain === + struct DisplayPresentInfoKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + struct XlibSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + struct XcbSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( 
VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + struct WaylandSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + struct AndroidSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + struct Win32SurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + struct DebugReportCallbackCreateInfoEXT; + + //=== VK_AMD_rasterization_order === + struct PipelineRasterizationStateRasterizationOrderAMD; + + //=== VK_EXT_debug_marker === + struct DebugMarkerObjectNameInfoEXT; + struct DebugMarkerObjectTagInfoEXT; + struct DebugMarkerMarkerInfoEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + struct QueueFamilyQueryResultStatusProperties2KHR; + struct VideoQueueFamilyProperties2KHR; + struct VideoProfileKHR; + struct VideoProfilesKHR; + struct VideoCapabilitiesKHR; + struct PhysicalDeviceVideoFormatInfoKHR; + struct VideoFormatPropertiesKHR; + struct VideoPictureResourceKHR; + struct VideoReferenceSlotKHR; + struct VideoGetMemoryPropertiesKHR; + struct VideoBindMemoryKHR; + struct VideoSessionCreateInfoKHR; + struct VideoSessionParametersCreateInfoKHR; + struct VideoSessionParametersUpdateInfoKHR; + struct VideoBeginCodingInfoKHR; + struct VideoEndCodingInfoKHR; + struct VideoCodingControlInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + struct VideoDecodeInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_dedicated_allocation === + struct DedicatedAllocationImageCreateInfoNV; + struct DedicatedAllocationBufferCreateInfoNV; + struct DedicatedAllocationMemoryAllocateInfoNV; + + //=== VK_EXT_transform_feedback === + struct PhysicalDeviceTransformFeedbackFeaturesEXT; + struct PhysicalDeviceTransformFeedbackPropertiesEXT; + struct PipelineRasterizationStateStreamCreateInfoEXT; + + //=== VK_NVX_binary_import === + struct CuModuleCreateInfoNVX; + struct CuFunctionCreateInfoNVX; + struct CuLaunchInfoNVX; + + //=== VK_NVX_image_view_handle === + struct ImageViewHandleInfoNVX; + struct ImageViewAddressPropertiesNVX; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + struct VideoEncodeH264CapabilitiesEXT; + struct VideoEncodeH264SessionCreateInfoEXT; + struct VideoEncodeH264SessionParametersCreateInfoEXT; + struct VideoEncodeH264SessionParametersAddInfoEXT; + struct VideoEncodeH264VclFrameInfoEXT; + struct VideoEncodeH264EmitPictureParametersEXT; + struct VideoEncodeH264DpbSlotInfoEXT; + struct VideoEncodeH264NaluSliceEXT; + struct VideoEncodeH264ProfileEXT; + struct VideoEncodeH264RateControlInfoEXT; + struct VideoEncodeH264RateControlLayerInfoEXT; + struct VideoEncodeH264QpEXT; + struct VideoEncodeH264FrameSizeEXT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h265 === + struct VideoEncodeH265CapabilitiesEXT; + struct VideoEncodeH265SessionCreateInfoEXT; + struct VideoEncodeH265SessionParametersCreateInfoEXT; + struct VideoEncodeH265SessionParametersAddInfoEXT; + struct VideoEncodeH265VclFrameInfoEXT; + struct VideoEncodeH265EmitPictureParametersEXT; + struct VideoEncodeH265DpbSlotInfoEXT; + struct VideoEncodeH265NaluSliceEXT; + struct VideoEncodeH265ProfileEXT; + struct VideoEncodeH265ReferenceListsEXT; + struct VideoEncodeH265RateControlInfoEXT; + struct 
VideoEncodeH265RateControlLayerInfoEXT; + struct VideoEncodeH265QpEXT; + struct VideoEncodeH265FrameSizeEXT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h264 === + struct VideoDecodeH264ProfileEXT; + struct VideoDecodeH264CapabilitiesEXT; + struct VideoDecodeH264SessionCreateInfoEXT; + struct VideoDecodeH264SessionParametersCreateInfoEXT; + struct VideoDecodeH264SessionParametersAddInfoEXT; + struct VideoDecodeH264PictureInfoEXT; + struct VideoDecodeH264MvcEXT; + struct VideoDecodeH264DpbSlotInfoEXT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_texture_gather_bias_lod === + struct TextureLODGatherFormatPropertiesAMD; + + //=== VK_AMD_shader_info === + struct ShaderResourceUsageAMD; + struct ShaderStatisticsInfoAMD; + + //=== VK_KHR_dynamic_rendering === + struct RenderingFragmentShadingRateAttachmentInfoKHR; + struct RenderingFragmentDensityMapAttachmentInfoEXT; + struct AttachmentSampleCountInfoAMD; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + struct MultiviewPerViewAttributesInfoNVX; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + struct StreamDescriptorSurfaceCreateInfoGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + struct PhysicalDeviceCornerSampledImageFeaturesNV; + + //=== VK_NV_external_memory_capabilities === + struct ExternalImageFormatPropertiesNV; + + //=== VK_NV_external_memory === + struct ExternalMemoryImageCreateInfoNV; + struct ExportMemoryAllocateInfoNV; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + struct ImportMemoryWin32HandleInfoNV; + struct ExportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + struct Win32KeyedMutexAcquireReleaseInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_validation_flags === + struct ValidationFlagsEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + struct ViSurfaceCreateInfoNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_astc_decode_mode === + struct ImageViewASTCDecodeModeEXT; + struct PhysicalDeviceASTCDecodeFeaturesEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + struct ImportMemoryWin32HandleInfoKHR; + struct ExportMemoryWin32HandleInfoKHR; + struct MemoryWin32HandlePropertiesKHR; + struct MemoryGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + struct ImportMemoryFdInfoKHR; + struct MemoryFdPropertiesKHR; + struct MemoryGetFdInfoKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + struct Win32KeyedMutexAcquireReleaseInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + struct ImportSemaphoreWin32HandleInfoKHR; + struct ExportSemaphoreWin32HandleInfoKHR; + struct D3D12FenceSubmitInfoKHR; + struct SemaphoreGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + struct ImportSemaphoreFdInfoKHR; + struct SemaphoreGetFdInfoKHR; + + //=== VK_KHR_push_descriptor === + struct PhysicalDevicePushDescriptorPropertiesKHR; + + //=== VK_EXT_conditional_rendering === + struct ConditionalRenderingBeginInfoEXT; + struct PhysicalDeviceConditionalRenderingFeaturesEXT; + struct CommandBufferInheritanceConditionalRenderingInfoEXT; + + //=== 
VK_KHR_incremental_present === + struct PresentRegionsKHR; + struct PresentRegionKHR; + struct RectLayerKHR; + + //=== VK_NV_clip_space_w_scaling === + struct ViewportWScalingNV; + struct PipelineViewportWScalingStateCreateInfoNV; + + //=== VK_EXT_display_surface_counter === + struct SurfaceCapabilities2EXT; + + //=== VK_EXT_display_control === + struct DisplayPowerInfoEXT; + struct DeviceEventInfoEXT; + struct DisplayEventInfoEXT; + struct SwapchainCounterCreateInfoEXT; + + //=== VK_GOOGLE_display_timing === + struct RefreshCycleDurationGOOGLE; + struct PastPresentationTimingGOOGLE; + struct PresentTimesInfoGOOGLE; + struct PresentTimeGOOGLE; + + //=== VK_NVX_multiview_per_view_attributes === + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + //=== VK_NV_viewport_swizzle === + struct ViewportSwizzleNV; + struct PipelineViewportSwizzleStateCreateInfoNV; + + //=== VK_EXT_discard_rectangles === + struct PhysicalDeviceDiscardRectanglePropertiesEXT; + struct PipelineDiscardRectangleStateCreateInfoEXT; + + //=== VK_EXT_conservative_rasterization === + struct PhysicalDeviceConservativeRasterizationPropertiesEXT; + struct PipelineRasterizationConservativeStateCreateInfoEXT; + + //=== VK_EXT_depth_clip_enable === + struct PhysicalDeviceDepthClipEnableFeaturesEXT; + struct PipelineRasterizationDepthClipStateCreateInfoEXT; + + //=== VK_EXT_hdr_metadata === + struct HdrMetadataEXT; + struct XYColorEXT; + + //=== VK_KHR_shared_presentable_image === + struct SharedPresentSurfaceCapabilitiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + struct ImportFenceWin32HandleInfoKHR; + struct ExportFenceWin32HandleInfoKHR; + struct FenceGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + struct ImportFenceFdInfoKHR; + struct FenceGetFdInfoKHR; + + //=== VK_KHR_performance_query === + struct PhysicalDevicePerformanceQueryFeaturesKHR; + struct PhysicalDevicePerformanceQueryPropertiesKHR; + struct PerformanceCounterKHR; + struct PerformanceCounterDescriptionKHR; + struct QueryPoolPerformanceCreateInfoKHR; + union PerformanceCounterResultKHR; + struct AcquireProfilingLockInfoKHR; + struct PerformanceQuerySubmitInfoKHR; + + //=== VK_KHR_get_surface_capabilities2 === + struct PhysicalDeviceSurfaceInfo2KHR; + struct SurfaceCapabilities2KHR; + struct SurfaceFormat2KHR; + + //=== VK_KHR_get_display_properties2 === + struct DisplayProperties2KHR; + struct DisplayPlaneProperties2KHR; + struct DisplayModeProperties2KHR; + struct DisplayPlaneInfo2KHR; + struct DisplayPlaneCapabilities2KHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + struct IOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + struct MacOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + struct DebugUtilsLabelEXT; + struct DebugUtilsMessengerCallbackDataEXT; + struct DebugUtilsMessengerCreateInfoEXT; + struct DebugUtilsObjectNameInfoEXT; + struct DebugUtilsObjectTagInfoEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + struct AndroidHardwareBufferUsageANDROID; + struct AndroidHardwareBufferPropertiesANDROID; + struct AndroidHardwareBufferFormatPropertiesANDROID; + struct ImportAndroidHardwareBufferInfoANDROID; + struct MemoryGetAndroidHardwareBufferInfoANDROID; + struct ExternalFormatANDROID; + struct 
AndroidHardwareBufferFormatProperties2ANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sample_locations === + struct SampleLocationEXT; + struct SampleLocationsInfoEXT; + struct AttachmentSampleLocationsEXT; + struct SubpassSampleLocationsEXT; + struct RenderPassSampleLocationsBeginInfoEXT; + struct PipelineSampleLocationsStateCreateInfoEXT; + struct PhysicalDeviceSampleLocationsPropertiesEXT; + struct MultisamplePropertiesEXT; + + //=== VK_EXT_blend_operation_advanced === + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + struct PipelineColorBlendAdvancedStateCreateInfoEXT; + + //=== VK_NV_fragment_coverage_to_color === + struct PipelineCoverageToColorStateCreateInfoNV; + + //=== VK_KHR_acceleration_structure === + union DeviceOrHostAddressKHR; + union DeviceOrHostAddressConstKHR; + struct AccelerationStructureBuildRangeInfoKHR; + struct AabbPositionsKHR; + using AabbPositionsNV = AabbPositionsKHR; + struct AccelerationStructureGeometryTrianglesDataKHR; + struct TransformMatrixKHR; + using TransformMatrixNV = TransformMatrixKHR; + struct AccelerationStructureBuildGeometryInfoKHR; + struct AccelerationStructureGeometryAabbsDataKHR; + struct AccelerationStructureInstanceKHR; + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + struct AccelerationStructureGeometryInstancesDataKHR; + union AccelerationStructureGeometryDataKHR; + struct AccelerationStructureGeometryKHR; + struct AccelerationStructureCreateInfoKHR; + struct WriteDescriptorSetAccelerationStructureKHR; + struct PhysicalDeviceAccelerationStructureFeaturesKHR; + struct PhysicalDeviceAccelerationStructurePropertiesKHR; + struct AccelerationStructureDeviceAddressInfoKHR; + struct AccelerationStructureVersionInfoKHR; + struct CopyAccelerationStructureToMemoryInfoKHR; + struct CopyMemoryToAccelerationStructureInfoKHR; + struct CopyAccelerationStructureInfoKHR; + struct AccelerationStructureBuildSizesInfoKHR; + + //=== VK_NV_framebuffer_mixed_samples === + struct PipelineCoverageModulationStateCreateInfoNV; + + //=== VK_NV_shader_sm_builtins === + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; + + //=== VK_EXT_image_drm_format_modifier === + struct DrmFormatModifierPropertiesListEXT; + struct DrmFormatModifierPropertiesEXT; + struct PhysicalDeviceImageDrmFormatModifierInfoEXT; + struct ImageDrmFormatModifierListCreateInfoEXT; + struct ImageDrmFormatModifierExplicitCreateInfoEXT; + struct ImageDrmFormatModifierPropertiesEXT; + struct DrmFormatModifierPropertiesList2EXT; + struct DrmFormatModifierProperties2EXT; + + //=== VK_EXT_validation_cache === + struct ValidationCacheCreateInfoEXT; + struct ShaderModuleValidationCacheCreateInfoEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + struct PhysicalDevicePortabilitySubsetFeaturesKHR; + struct PhysicalDevicePortabilitySubsetPropertiesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + struct ShadingRatePaletteNV; + struct PipelineViewportShadingRateImageStateCreateInfoNV; + struct PhysicalDeviceShadingRateImageFeaturesNV; + struct PhysicalDeviceShadingRateImagePropertiesNV; + struct CoarseSampleLocationNV; + struct CoarseSampleOrderCustomNV; + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; + + //=== VK_NV_ray_tracing === + struct RayTracingShaderGroupCreateInfoNV; + struct RayTracingPipelineCreateInfoNV; + struct GeometryTrianglesNV; + struct 
GeometryAABBNV; + struct GeometryDataNV; + struct GeometryNV; + struct AccelerationStructureInfoNV; + struct AccelerationStructureCreateInfoNV; + struct BindAccelerationStructureMemoryInfoNV; + struct WriteDescriptorSetAccelerationStructureNV; + struct AccelerationStructureMemoryRequirementsInfoNV; + struct PhysicalDeviceRayTracingPropertiesNV; + + //=== VK_NV_representative_fragment_test === + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + struct PipelineRepresentativeFragmentTestStateCreateInfoNV; + + //=== VK_EXT_filter_cubic === + struct PhysicalDeviceImageViewImageFormatInfoEXT; + struct FilterCubicImageViewImageFormatPropertiesEXT; + + //=== VK_EXT_external_memory_host === + struct ImportMemoryHostPointerInfoEXT; + struct MemoryHostPointerPropertiesEXT; + struct PhysicalDeviceExternalMemoryHostPropertiesEXT; + + //=== VK_KHR_shader_clock === + struct PhysicalDeviceShaderClockFeaturesKHR; + + //=== VK_AMD_pipeline_compiler_control === + struct PipelineCompilerControlCreateInfoAMD; + + //=== VK_EXT_calibrated_timestamps === + struct CalibratedTimestampInfoEXT; + + //=== VK_AMD_shader_core_properties === + struct PhysicalDeviceShaderCorePropertiesAMD; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_decode_h265 === + struct VideoDecodeH265ProfileEXT; + struct VideoDecodeH265CapabilitiesEXT; + struct VideoDecodeH265SessionCreateInfoEXT; + struct VideoDecodeH265SessionParametersCreateInfoEXT; + struct VideoDecodeH265SessionParametersAddInfoEXT; + struct VideoDecodeH265PictureInfoEXT; + struct VideoDecodeH265DpbSlotInfoEXT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_global_priority === + struct DeviceQueueGlobalPriorityCreateInfoKHR; + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; + struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + struct QueueFamilyGlobalPriorityPropertiesKHR; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; + + //=== VK_AMD_memory_overallocation_behavior === + struct DeviceMemoryOverallocationCreateInfoAMD; + + //=== VK_EXT_vertex_attribute_divisor === + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + struct VertexInputBindingDivisorDescriptionEXT; + struct PipelineVertexInputDivisorStateCreateInfoEXT; + struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + struct PresentFrameTokenGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_compute_shader_derivatives === + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; + + //=== VK_NV_mesh_shader === + struct PhysicalDeviceMeshShaderFeaturesNV; + struct PhysicalDeviceMeshShaderPropertiesNV; + struct DrawMeshTasksIndirectCommandNV; + + //=== VK_NV_fragment_shader_barycentric === + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + //=== VK_NV_shader_image_footprint === + struct PhysicalDeviceShaderImageFootprintFeaturesNV; + + //=== VK_NV_scissor_exclusive === + struct PipelineViewportExclusiveScissorStateCreateInfoNV; + struct PhysicalDeviceExclusiveScissorFeaturesNV; + + //=== VK_NV_device_diagnostic_checkpoints === + struct QueueFamilyCheckpointPropertiesNV; + struct CheckpointDataNV; + + //=== VK_INTEL_shader_integer_functions2 === + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + //=== VK_INTEL_performance_query === + union PerformanceValueDataINTEL; + struct PerformanceValueINTEL; 
+ struct InitializePerformanceApiInfoINTEL; + struct QueryPoolPerformanceQueryCreateInfoINTEL; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + struct PerformanceMarkerInfoINTEL; + struct PerformanceStreamMarkerInfoINTEL; + struct PerformanceOverrideInfoINTEL; + struct PerformanceConfigurationAcquireInfoINTEL; + + //=== VK_EXT_pci_bus_info === + struct PhysicalDevicePCIBusInfoPropertiesEXT; + + //=== VK_AMD_display_native_hdr === + struct DisplayNativeHdrSurfaceCapabilitiesAMD; + struct SwapchainDisplayNativeHdrCreateInfoAMD; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + struct ImagePipeSurfaceCreateInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + struct MetalSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + struct PhysicalDeviceFragmentDensityMapFeaturesEXT; + struct PhysicalDeviceFragmentDensityMapPropertiesEXT; + struct RenderPassFragmentDensityMapCreateInfoEXT; + + //=== VK_KHR_fragment_shading_rate === + struct FragmentShadingRateAttachmentInfoKHR; + struct PipelineFragmentShadingRateStateCreateInfoKHR; + struct PhysicalDeviceFragmentShadingRateFeaturesKHR; + struct PhysicalDeviceFragmentShadingRatePropertiesKHR; + struct PhysicalDeviceFragmentShadingRateKHR; + + //=== VK_AMD_shader_core_properties2 === + struct PhysicalDeviceShaderCoreProperties2AMD; + + //=== VK_AMD_device_coherent_memory === + struct PhysicalDeviceCoherentMemoryFeaturesAMD; + + //=== VK_EXT_shader_image_atomic_int64 === + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + //=== VK_EXT_memory_budget === + struct PhysicalDeviceMemoryBudgetPropertiesEXT; + + //=== VK_EXT_memory_priority === + struct PhysicalDeviceMemoryPriorityFeaturesEXT; + struct MemoryPriorityAllocateInfoEXT; + + //=== VK_KHR_surface_protected_capabilities === + struct SurfaceProtectedCapabilitiesKHR; + + //=== VK_NV_dedicated_allocation_image_aliasing === + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + //=== VK_EXT_buffer_device_address === + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + struct BufferDeviceAddressCreateInfoEXT; + + //=== VK_EXT_validation_features === + struct ValidationFeaturesEXT; + + //=== VK_KHR_present_wait === + struct PhysicalDevicePresentWaitFeaturesKHR; + + //=== VK_NV_cooperative_matrix === + struct CooperativeMatrixPropertiesNV; + struct PhysicalDeviceCooperativeMatrixFeaturesNV; + struct PhysicalDeviceCooperativeMatrixPropertiesNV; + + //=== VK_NV_coverage_reduction_mode === + struct PhysicalDeviceCoverageReductionModeFeaturesNV; + struct PipelineCoverageReductionStateCreateInfoNV; + struct FramebufferMixedSamplesCombinationNV; + + //=== VK_EXT_fragment_shader_interlock === + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + //=== VK_EXT_ycbcr_image_arrays === + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; + + //=== VK_EXT_provoking_vertex === + struct PhysicalDeviceProvokingVertexFeaturesEXT; + struct PhysicalDeviceProvokingVertexPropertiesEXT; + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + struct SurfaceFullScreenExclusiveInfoEXT; + struct SurfaceCapabilitiesFullScreenExclusiveEXT; + struct SurfaceFullScreenExclusiveWin32InfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ 
+ + //=== VK_EXT_headless_surface === + struct HeadlessSurfaceCreateInfoEXT; + + //=== VK_EXT_line_rasterization === + struct PhysicalDeviceLineRasterizationFeaturesEXT; + struct PhysicalDeviceLineRasterizationPropertiesEXT; + struct PipelineRasterizationLineStateCreateInfoEXT; + + //=== VK_EXT_shader_atomic_float === + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT; + + //=== VK_EXT_index_type_uint8 === + struct PhysicalDeviceIndexTypeUint8FeaturesEXT; + + //=== VK_EXT_extended_dynamic_state === + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT; + + //=== VK_KHR_pipeline_executable_properties === + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + struct PipelineInfoKHR; + struct PipelineExecutablePropertiesKHR; + struct PipelineExecutableInfoKHR; + union PipelineExecutableStatisticValueKHR; + struct PipelineExecutableStatisticKHR; + struct PipelineExecutableInternalRepresentationKHR; + + //=== VK_EXT_shader_atomic_float2 === + struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + //=== VK_NV_device_generated_commands === + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + struct GraphicsShaderGroupCreateInfoNV; + struct GraphicsPipelineShaderGroupsCreateInfoNV; + struct BindShaderGroupIndirectCommandNV; + struct BindIndexBufferIndirectCommandNV; + struct BindVertexBufferIndirectCommandNV; + struct SetStateFlagsIndirectCommandNV; + struct IndirectCommandsStreamNV; + struct IndirectCommandsLayoutTokenNV; + struct IndirectCommandsLayoutCreateInfoNV; + struct GeneratedCommandsInfoNV; + struct GeneratedCommandsMemoryRequirementsInfoNV; + + //=== VK_NV_inherited_viewport_scissor === + struct PhysicalDeviceInheritedViewportScissorFeaturesNV; + struct CommandBufferInheritanceViewportScissorInfoNV; + + //=== VK_EXT_texel_buffer_alignment === + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + //=== VK_QCOM_render_pass_transform === + struct RenderPassTransformBeginInfoQCOM; + struct CommandBufferInheritanceRenderPassTransformInfoQCOM; + + //=== VK_EXT_device_memory_report === + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT; + struct DeviceDeviceMemoryReportCreateInfoEXT; + struct DeviceMemoryReportCallbackDataEXT; + + //=== VK_EXT_robustness2 === + struct PhysicalDeviceRobustness2FeaturesEXT; + struct PhysicalDeviceRobustness2PropertiesEXT; + + //=== VK_EXT_custom_border_color === + struct SamplerCustomBorderColorCreateInfoEXT; + struct PhysicalDeviceCustomBorderColorPropertiesEXT; + struct PhysicalDeviceCustomBorderColorFeaturesEXT; + + //=== VK_KHR_pipeline_library === + struct PipelineLibraryCreateInfoKHR; + + //=== VK_KHR_present_id === + struct PresentIdKHR; + struct PhysicalDevicePresentIdFeaturesKHR; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + struct VideoEncodeInfoKHR; + struct VideoEncodeRateControlInfoKHR; + struct VideoEncodeRateControlLayerInfoKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + struct PhysicalDeviceDiagnosticsConfigFeaturesNV; + struct DeviceDiagnosticsConfigCreateInfoNV; + + //=== VK_KHR_synchronization2 === + struct QueueFamilyCheckpointProperties2NV; + struct CheckpointData2NV; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + //=== VK_NV_fragment_shading_rate_enums === + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + struct 
PipelineFragmentShadingRateEnumStateCreateInfoNV; + + //=== VK_NV_ray_tracing_motion_blur === + struct AccelerationStructureGeometryMotionTrianglesDataNV; + struct AccelerationStructureMotionInfoNV; + struct AccelerationStructureMotionInstanceNV; + union AccelerationStructureMotionInstanceDataNV; + struct AccelerationStructureMatrixMotionInstanceNV; + struct AccelerationStructureSRTMotionInstanceNV; + struct SRTDataNV; + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV; + + //=== VK_EXT_ycbcr_2plane_444_formats === + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + //=== VK_EXT_fragment_density_map2 === + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT; + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT; + + //=== VK_QCOM_rotated_copy_commands === + struct CopyCommandTransformInfoQCOM; + + //=== VK_KHR_workgroup_memory_explicit_layout === + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + //=== VK_EXT_4444_formats === + struct PhysicalDevice4444FormatsFeaturesEXT; + + //=== VK_ARM_rasterization_order_attachment_access === + struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + + //=== VK_EXT_rgba10x6_formats === + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + struct DirectFBSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_KHR_ray_tracing_pipeline === + struct RayTracingShaderGroupCreateInfoKHR; + struct RayTracingPipelineCreateInfoKHR; + struct PhysicalDeviceRayTracingPipelineFeaturesKHR; + struct PhysicalDeviceRayTracingPipelinePropertiesKHR; + struct StridedDeviceAddressRegionKHR; + struct TraceRaysIndirectCommandKHR; + struct RayTracingPipelineInterfaceCreateInfoKHR; + + //=== VK_KHR_ray_query === + struct PhysicalDeviceRayQueryFeaturesKHR; + + //=== VK_VALVE_mutable_descriptor_type === + struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + struct MutableDescriptorTypeListVALVE; + struct MutableDescriptorTypeCreateInfoVALVE; + + //=== VK_EXT_vertex_input_dynamic_state === + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + struct VertexInputBindingDescription2EXT; + struct VertexInputAttributeDescription2EXT; + + //=== VK_EXT_physical_device_drm === + struct PhysicalDeviceDrmPropertiesEXT; + + //=== VK_EXT_depth_clip_control === + struct PhysicalDeviceDepthClipControlFeaturesEXT; + struct PipelineViewportDepthClipControlCreateInfoEXT; + + //=== VK_EXT_primitive_topology_list_restart === + struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + struct ImportMemoryZirconHandleInfoFUCHSIA; + struct MemoryZirconHandlePropertiesFUCHSIA; + struct MemoryGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + struct ImportSemaphoreZirconHandleInfoFUCHSIA; + struct SemaphoreGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + struct BufferCollectionCreateInfoFUCHSIA; + struct ImportMemoryBufferCollectionFUCHSIA; + struct BufferCollectionImageCreateInfoFUCHSIA; + struct BufferConstraintsInfoFUCHSIA; + struct BufferCollectionBufferCreateInfoFUCHSIA; + struct BufferCollectionPropertiesFUCHSIA; + struct SysmemColorSpaceFUCHSIA; + struct ImageConstraintsInfoFUCHSIA; + struct ImageFormatConstraintsInfoFUCHSIA; + struct 
BufferCollectionConstraintsInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + struct SubpassShadingPipelineCreateInfoHUAWEI; + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI; + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI; + + //=== VK_HUAWEI_invocation_mask === + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI; + + //=== VK_NV_external_memory_rdma === + struct MemoryGetRemoteAddressInfoNV; + struct PhysicalDeviceExternalMemoryRDMAFeaturesNV; + + //=== VK_EXT_extended_dynamic_state2 === + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + struct ScreenSurfaceCreateInfoQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + struct PhysicalDeviceColorWriteEnableFeaturesEXT; + struct PipelineColorWriteCreateInfoEXT; + + //=== VK_EXT_image_view_min_lod === + struct PhysicalDeviceImageViewMinLodFeaturesEXT; + struct ImageViewMinLodCreateInfoEXT; + + //=== VK_EXT_multi_draw === + struct PhysicalDeviceMultiDrawFeaturesEXT; + struct PhysicalDeviceMultiDrawPropertiesEXT; + struct MultiDrawInfoEXT; + struct MultiDrawIndexedInfoEXT; + + //=== VK_EXT_border_color_swizzle === + struct PhysicalDeviceBorderColorSwizzleFeaturesEXT; + struct SamplerBorderColorComponentMappingCreateInfoEXT; + + //=== VK_EXT_pageable_device_local_memory === + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + //=== VK_QCOM_fragment_density_map_offset === + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + struct SubpassFragmentDensityMapOffsetEndInfoQCOM; + + //=== VK_NV_linear_color_attachment === + struct PhysicalDeviceLinearColorAttachmentFeaturesNV; + + //=============== + //=== HANDLEs === + //=============== + + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + using NativeType = VkSurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: + VULKAN_HPP_CONSTEXPR SurfaceKHR() = default; + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceKHR const & ) const = default; +#else + bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == rhs.m_surfaceKHR; + } + + bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != rhs.m_surfaceKHR; + } + + bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR < rhs.m_surfaceKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const 
VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceKHR>::value,
+                            "SurfaceKHR is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<VkSurfaceKHR>
+  {
+    using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType = VkDebugReportCallbackEXT;
+    using NativeType = VkDebugReportCallbackEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default;
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this;
+    }
+#endif
+
+    DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugReportCallbackEXT const & ) const = default;
+#else
+    bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugReportCallbackEXT m_debugReportCallbackEXT = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) ==
+                              sizeof( VkDebugReportCallbackEXT ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::value,
+                            "DebugReportCallbackEXT is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<VkDebugReportCallbackEXT>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DebugUtilsMessengerEXT
+  {
+  public:
+    using CType = VkDebugUtilsMessengerEXT;
+    using NativeType = VkDebugUtilsMessengerEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default;
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+      return *this;
+    }
+#endif
+
+    DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugUtilsMessengerEXT const & ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) ==
+                              sizeof( VkDebugUtilsMessengerEXT ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value,
+                            "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead."
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + using NativeType = VkDisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() = default; + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayKHR const & ) const = default; +#else + bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == rhs.m_displayKHR; + } + + bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR < rhs.m_displayKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayKHR is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + using NativeType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() = default; + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainKHR const & ) const = default; +#else + bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainKHR is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + using NativeType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + VULKAN_HPP_CONSTEXPR Semaphore() = default; + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Semaphore const & ) const = default; +#else + bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore < rhs.m_semaphore; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Semaphore is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Fence + { + public: + using CType = VkFence; + using NativeType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + VULKAN_HPP_CONSTEXPR Fence() = default; + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Fence const & ) const = default; +#else + bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence == rhs.m_fence; + } + + bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence != rhs.m_fence; + } + + bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence < rhs.m_fence; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Fence is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + using NativeType = VkPerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default; + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PerformanceConfigurationINTEL & + operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; + } +#endif + + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; + } + + bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; + } + + bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == + sizeof( VkPerformanceConfigurationINTEL ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PerformanceConfigurationINTEL is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class QueryPool + { + public: + using CType = VkQueryPool; + using NativeType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + VULKAN_HPP_CONSTEXPR QueryPool() = default; + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPool const & ) const = default; +#else + bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool < rhs.m_queryPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryPool is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Buffer + { + public: + using CType = VkBuffer; + using NativeType = VkBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + + public: + VULKAN_HPP_CONSTEXPR Buffer() = default; + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_buffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Buffer const & ) const = default; +#else + bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer == rhs.m_buffer; + } + + bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer != rhs.m_buffer; + } + + bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer < rhs.m_buffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Buffer is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + using NativeType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() = default; + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayout const & ) const = default; +#else + bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineLayout is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + using NativeType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() = default; + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + : m_descriptorSet( descriptorSet ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSet const & ) const = default; +#else + bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet < rhs.m_descriptorSet; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSet is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ImageView + { + public: + using CType = VkImageView; + using NativeType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + VULKAN_HPP_CONSTEXPR ImageView() = default; + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageView const & ) const = default; +#else + bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView != rhs.m_imageView; + } + + bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView < rhs.m_imageView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageView is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + using NativeType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + VULKAN_HPP_CONSTEXPR Pipeline() = default; + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Pipeline const & ) const = default; +#else + bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline < rhs.m_pipeline; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Pipeline is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Image + { + public: + using CType = VkImage; + using NativeType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: + VULKAN_HPP_CONSTEXPR Image() = default; + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Image const & ) const = default; +#else + bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image == rhs.m_image; + } + + bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image != rhs.m_image; + } + + bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image < rhs.m_image; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Image is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type<ObjectType::eImage>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Image;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class AccelerationStructureNV
+  {
+  public:
+    using CType = VkAccelerationStructureNV;
+    using NativeType = VkAccelerationStructureNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
+
+  public:
+    VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default;
+    VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureNV( accelerationStructureNV )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureNV = accelerationStructureNV;
+      return *this;
+    }
+#endif
+
+    AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureNV = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureNV const & ) const = default;
+#else
+    bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
+    }
+
+    bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
+    }
+
+    bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkAccelerationStructureNV m_accelerationStructureNV = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) ==
+                              sizeof( VkAccelerationStructureNV ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::value,
+                            "AccelerationStructureNV is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eAccelerationStructureNV>
+  {
+    using type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DescriptorUpdateTemplate
+  {
+  public:
+    using CType = VkDescriptorUpdateTemplate;
+    using NativeType = VkDescriptorUpdateTemplate;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default;
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+      return *this;
+    }
+#endif
+
+    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorUpdateTemplate const & ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) ==
+                              sizeof( VkDescriptorUpdateTemplate ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::value,
+                            "DescriptorUpdateTemplate is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorUpdateTemplate>
+  {
+    using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
+  class Event
+  {
+  public:
+    using CType = VkEvent;
+    using NativeType = VkEvent;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Event() = default;
+    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = event;
+      return *this;
+    }
+#endif
+
+    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Event const & ) const = default;
+#else
+    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == rhs.m_event;
+    }
+
+    bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != rhs.m_event;
+    }
+
+    bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event < rhs.m_event;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkEvent m_event = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Event>::value,
+                            "Event is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eEvent>
+  {
+    using type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
+                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Event;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class AccelerationStructureKHR
+  {
+  public:
+    using CType = VkAccelerationStructureKHR;
+    using NativeType = VkAccelerationStructureKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default;
+    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+    VULKAN_HPP_TYPESAFE_EXPLICIT
+      AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureKHR( accelerationStructureKHR )
+    {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureKHR = accelerationStructureKHR;
+      return *this;
+    }
+#endif
+
+    AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureKHR = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR;
+    }
+
+    bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR;
+    }
+
+    bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkAccelerationStructureKHR m_accelerationStructureKHR = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) ==
+                              sizeof( VkAccelerationStructureKHR ),
+                            "handle and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::value,
+                            "AccelerationStructureKHR is not nothrow_move_constructible!" );
+
+  template <>
+  struct VULKAN_HPP_DEPRECATED(
+    "vk::cpp_type is deprecated. Use vk::CppType instead."
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + using NativeType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() = default; + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer( commandBuffer ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBuffer const & ) const = default; +#else + bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer < rhs.m_commandBuffer; + } +#endif + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result + begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, + ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, + ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setLineWidth( float lineWidth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setBlendConstants( const float blendConstants[4], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBounds( float minDepthBounds, + float maxDepthBounds, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const 
VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + 
VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + ArrayProxy const & ranges, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & 
memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + void setDeviceMask( uint32_t deviceMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * 
pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_3 === + + template + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRendering( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportWithCount( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWithCount( ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissorWithCount( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
setScissorWithCount( ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_marker === + + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void debugMarkerInsertEXT( 
const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindTransformFeedbackBuffersEXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * 
pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NVX_binary_import === + + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_draw_indirect_count === + + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_dynamic_rendering === + + template + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderingKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + template + void setDeviceMaskKHR( uint32_t deviceMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_push_descriptor === + + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_conditional_rendering === + + template + void beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_clip_space_w_scaling === + + template + void setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy const & 
viewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_discard_rectangles === + + template + void + setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_create_renderpass2 === + + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_utils === + + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_sample_locations === + + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_acceleration_structure === + + template + void buildAccelerationStructuresKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructuresIndirectKHR( + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( + const 
VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_shading_rate_image === + + template + void + bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportShadingRatePaletteNV( + uint32_t firstViewport, + ArrayProxy const & shadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_ray_tracing === + + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeAccelerationStructuresPropertiesNV( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_draw_indirect_count === + + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_buffer_marker === + + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_mesh_shader === + + template + void drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_scissor_exclusive === + + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy const & exclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void setCheckpointNV( const void * pCheckpointMarker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCheckpointNV( CheckpointMarkerType const & checkpointMarker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_INTEL_performance_query === + + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_fragment_shading_rate === + + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_line_rasterization === + + template + void setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state === + + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setViewportWithCountEXT( ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setScissorWithCountEXT( ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2EXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void + setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands === + + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_fragment_shading_rate_enums === + + template + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_copy_commands2 === + + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_ray_tracing_pipeline === + + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_vertex_input_dynamic_state === + + template + void + setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_HUAWEI_subpass_shading === + + template + void subpassShadingHUAWEI( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_invocation_mask === + + template + void + bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state2 === + + template + void + setPatchControlPointsEXT( uint32_t patchControlPoints, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_color_write_enable === + + template + void + setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_multi_draw === + + template + void drawMultiEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMultiEXT( ArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
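
The declarations above follow the usual Vulkan-Hpp pattern: each command has a raw count/pointer form plus, unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, an "enhanced" overload that takes ArrayProxy ranges or references instead. The following is a minimal sketch of how a caller might record a few of these vk::CommandBuffer extension commands; it is not part of the patch. It assumes a hypothetical command buffer `cmd` already in the recording state inside a render pass, a default dispatcher that resolves the extension entry points, and a device with VK_EXT_extended_dynamic_state, VK_EXT_color_write_enable, VK_EXT_multi_draw, and VK_EXT_debug_utils enabled; the function and variable names are illustrative only.

#include <array>
#include <vector>
#include <vulkan/vulkan.hpp>

// Hypothetical helper (not from the patch): records a handful of the
// dynamic-state and multi-draw commands declared above. 'cmd' is assumed to be
// a vk::CommandBuffer in the recording state, inside a render pass, with the
// relevant extensions enabled on the device.
void recordExampleCommands( vk::CommandBuffer cmd )
{
  // Enhanced-mode overloads accept ArrayProxy ranges instead of count/pointer pairs.
  std::array<vk::Viewport, 1> viewports = { vk::Viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f ) };
  cmd.setViewportWithCountEXT( viewports );

  std::array<vk::Rect2D, 1> scissors = { vk::Rect2D( { 0, 0 }, { 640, 480 } ) };
  cmd.setScissorWithCountEXT( scissors );

  // Per-attachment color-write enables (VK_EXT_color_write_enable).
  std::array<vk::Bool32, 1> colorWriteEnables = { VK_TRUE };
  cmd.setColorWriteEnableEXT( colorWriteEnables );

  // Bracket the draws with a debug label (VK_EXT_debug_utils).
  vk::DebugUtilsLabelEXT label( "multi draw" );
  cmd.beginDebugUtilsLabelEXT( label );

  // VK_EXT_multi_draw: issue several non-indexed draws with a single call.
  std::vector<vk::MultiDrawInfoEXT> draws = { { 0, 3 }, { 3, 3 } };  // { firstVertex, vertexCount }
  cmd.drawMultiEXT( draws,
                    1 /*instanceCount*/,
                    0 /*firstInstance*/,
                    static_cast<uint32_t>( sizeof( vk::MultiDrawInfoEXT ) ) /*stride*/ );

  cmd.endDebugUtilsLabelEXT();
}

With the raw overloads the same calls would pass explicit counts and pointers (for example cmd.drawMultiEXT( static_cast<uint32_t>( draws.size() ), draws.data(), 1, 0, sizeof( vk::MultiDrawInfoEXT ) )); the ArrayProxy forms shown here simply derive the count from the container.
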
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void drawMultiIndexedEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMultiIndexedEXT( ArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Optional vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBuffer is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DeviceMemory + { + public: + using CType = VkDeviceMemory; + using NativeType = VkDeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + VULKAN_HPP_CONSTEXPR DeviceMemory() = default; + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + : m_deviceMemory( deviceMemory ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = deviceMemory; + return *this; + } +#endif + + DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemory const & ) const = default; +#else + bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == rhs.m_deviceMemory; + } + + bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != rhs.m_deviceMemory; + } + + bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory < rhs.m_deviceMemory; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory; + } + + explicit operator bool() 
const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deviceMemory == VK_NULL_HANDLE; + } + + private: + VkDeviceMemory m_deviceMemory = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceMemory is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + using NativeType = VkVideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default; + VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionKHR( videoSessionKHR ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = videoSessionKHR; + return *this; + } +# endif + + VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionKHR const & ) const = default; +# else + bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == rhs.m_videoSessionKHR; + } + + bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != rhs.m_videoSessionKHR; + } + + bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR < rhs.m_videoSessionKHR; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionKHR m_videoSessionKHR = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoSessionKHR is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + using NativeType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default; + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = deferredOperationKHR; + return *this; + } +#endif + + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeferredOperationKHR const & ) const = default; +#else + bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == rhs.m_deferredOperationKHR; + } + + bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != rhs.m_deferredOperationKHR; + } + + bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR < rhs.m_deferredOperationKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeferredOperationKHR is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + class BufferCollectionFUCHSIA + { + public: + using CType = VkBufferCollectionFUCHSIA; + using NativeType = VkBufferCollectionFUCHSIA; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + + public: + VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default; + VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferCollectionFUCHSIA & operator=( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = bufferCollectionFUCHSIA; + return *this; + } +# endif + + BufferCollectionFUCHSIA & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA == rhs.m_bufferCollectionFUCHSIA; + } + + bool operator!=( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA != rhs.m_bufferCollectionFUCHSIA; + } + + bool operator<( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA < rhs.m_bufferCollectionFUCHSIA; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA == VK_NULL_HANDLE; + } + + private: + VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) == + sizeof( VkBufferCollectionFUCHSIA ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + class BufferView + { + public: + using CType = VkBufferView; + using NativeType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + VULKAN_HPP_CONSTEXPR BufferView() = default; + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferView const & ) const = default; +#else + bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView < rhs.m_bufferView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferView is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CommandPool + { + public: + using CType = VkCommandPool; + using NativeType = VkCommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + VULKAN_HPP_CONSTEXPR CommandPool() = default; + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + : m_commandPool( commandPool ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPool const & ) const = default; +#else + bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool < rhs.m_commandPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandPool is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineCache + { + public: + using CType = VkPipelineCache; + using NativeType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + VULKAN_HPP_CONSTEXPR PipelineCache() = default; + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + : m_pipelineCache( pipelineCache ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCache const & ) const = default; +#else + bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == rhs.m_pipelineCache; + } + + bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != rhs.m_pipelineCache; + } + + bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache < rhs.m_pipelineCache; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCache is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + using NativeType = VkCuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default; + VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + : m_cuFunctionNVX( cuFunctionNVX ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = cuFunctionNVX; + return *this; + } +#endif + + CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuFunctionNVX const & ) const = default; +#else + bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == rhs.m_cuFunctionNVX; + } + + bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != rhs.m_cuFunctionNVX; + } + + bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX < rhs.m_cuFunctionNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == VK_NULL_HANDLE; + } + + private: + VkCuFunctionNVX m_cuFunctionNVX = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuFunctionNVX is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + using NativeType = VkCuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + VULKAN_HPP_CONSTEXPR CuModuleNVX() = default; + VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + : m_cuModuleNVX( cuModuleNVX ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = cuModuleNVX; + return *this; + } +#endif + + CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleNVX const & ) const = default; +#else + bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == rhs.m_cuModuleNVX; + } + + bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != rhs.m_cuModuleNVX; + } + + bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX < rhs.m_cuModuleNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == VK_NULL_HANDLE; + } + + private: + VkCuModuleNVX m_cuModuleNVX = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuModuleNVX is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + using NativeType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + VULKAN_HPP_CONSTEXPR DescriptorPool() = default; + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + : m_descriptorPool( descriptorPool ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPool const & ) const = default; +#else + bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == rhs.m_descriptorPool; + } + + bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != rhs.m_descriptorPool; + } + + bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool < rhs.m_descriptorPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPool is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + using NativeType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default; + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayout const & ) const = default; +#else + bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayout is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + using NativeType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + VULKAN_HPP_CONSTEXPR Framebuffer() = default; + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + : m_framebuffer( framebuffer ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Framebuffer const & ) const = default; +#else + bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer < rhs.m_framebuffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Framebuffer is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + using NativeType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default; + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; + return *this; + } +#endif + + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; + } + + bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; + } + + bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == + sizeof( VkIndirectCommandsLayoutNV ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutNV is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PrivateDataSlot + { + public: + using CType = VkPrivateDataSlot; + using NativeType = VkPrivateDataSlot; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PrivateDataSlot() = default; + VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT + : m_privateDataSlot( privateDataSlot ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PrivateDataSlot & operator=( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = privateDataSlot; + return *this; + } +#endif + + PrivateDataSlot & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlot const & ) const = default; +#else + bool operator==( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot == rhs.m_privateDataSlot; + } + + bool operator!=( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot != rhs.m_privateDataSlot; + } + + bool operator<( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot < rhs.m_privateDataSlot; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot == VK_NULL_HANDLE; + } + + private: + VkPrivateDataSlot m_privateDataSlot = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PrivateDataSlot is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using PrivateDataSlotEXT = PrivateDataSlot; + + class RenderPass + { + public: + using CType = VkRenderPass; + using NativeType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + VULKAN_HPP_CONSTEXPR RenderPass() = default; + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPass const & ) const = default; +#else + bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass < rhs.m_renderPass; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPass is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Sampler + { + public: + using CType = VkSampler; + using NativeType = VkSampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + VULKAN_HPP_CONSTEXPR Sampler() = default; + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_sampler = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Sampler const & ) const = default; +#else + bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler == rhs.m_sampler; + } + + bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler != rhs.m_sampler; + } + + bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler < rhs.m_sampler; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Sampler is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + using NativeType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default; + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversion const & ) const = default; +#else + bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; + } + + bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; + } + + bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == + sizeof( VkSamplerYcbcrConversion ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversion is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + class ShaderModule + { + public: + using CType = VkShaderModule; + using NativeType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + VULKAN_HPP_CONSTEXPR ShaderModule() = default; + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + : m_shaderModule( shaderModule ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModule const & ) const = default; +#else + bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule < rhs.m_shaderModule; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderModule is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + using NativeType = VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default; + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + : m_validationCacheEXT( validationCacheEXT ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheEXT const & ) const = default; +#else + bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == rhs.m_validationCacheEXT; + } + + bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != rhs.m_validationCacheEXT; + } + + bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT < rhs.m_validationCacheEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationCacheEXT is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using NativeType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default; + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT + VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( videoSessionParametersKHR ) + {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = videoSessionParametersKHR; + return *this; + } +# endif + + VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR; + } + + bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR; + } + + bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == + sizeof( VkVideoSessionParametersKHR ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoSessionParametersKHR is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class Queue + { + public: + using CType = VkQueue; + using NativeType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: + VULKAN_HPP_CONSTEXPR Queue() = default; + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT + { + m_queue = queue; + return *this; + } +#endif + + Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queue = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Queue const & ) const = default; +#else + bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue == rhs.m_queue; + } + + bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue != rhs.m_queue; + } + + bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue < rhs.m_queue; + } +#endif + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result + submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_3 === + + template + VULKAN_HPP_NODISCARD Result + submit2( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result + presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_utils === + + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CheckpointDataNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_INTEL_performance_query === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_synchronization2 === + + template + VULKAN_HPP_NODISCARD Result + submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CheckpointData2NVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Queue is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Device; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueAccelerationStructureKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueAccelerationStructureNV = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBuffer = UniqueHandle; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBufferCollectionFUCHSIA = UniqueHandle; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueBufferView = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + using UniqueCommandBuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCommandPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCuFunctionNVX = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueCuModuleNVX = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDeferredOperationKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + using UniqueDescriptorSet = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorSetLayout = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectFree; + }; + using UniqueDeviceMemory = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueEvent = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueFence = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueFramebuffer = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueImage = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueImageView = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipeline = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using 
deleter = ObjectDestroy; + }; + using UniquePipelineCache = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePipelineLayout = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniquePrivateDataSlot = UniqueHandle; + using UniquePrivateDataSlotEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueQueryPool = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueRenderPass = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSampler = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSemaphore = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueShaderModule = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSwapchainKHR = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueValidationCacheEXT = UniqueHandle; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueVideoSessionKHR = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueVideoSessionParametersKHR = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Device + { + public: + using CType = VkDevice; + using NativeType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + VULKAN_HPP_CONSTEXPR Device() = default; + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT + { + m_device = device; + return *this; + } +#endif + + Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_device = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Device const & ) const = default; +#else + bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device == rhs.m_device; + } + + bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device != rhs.m_device; + } + + bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device < rhs.m_device; + } +#endif + + //=== VK_VERSION_1_0 === + + template + PFN_vkVoidFunction + getProcAddr( const char * pName, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction + getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result + flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + flushMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( + uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + invalidateMappedMemoryRanges( ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( + VULKAN_HPP_NAMESPACE::Buffer buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( + VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirementsAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator + 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + resetFences( ArrayProxy const & fences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
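    // Illustrative sketch for the semaphore and event creation wrappers above (assumes
    // vk::Device device, exceptions enabled); the unique handles free themselves on scope exit.
    vk::UniqueSemaphore imageAvailable = device.createSemaphoreUnique( vk::SemaphoreCreateInfo{} );
    vk::UniqueEvent     transferDone   = device.createEventUnique( vk::EventCreateInfo{} );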
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
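    // Illustrative sketch: host-side event signalling and query-pool creation, matching the
    // declarations above (assumes vk::Device device and vk::Event event; exceptions enabled).
    device.setEvent( event );  // signal the event from the host
    if ( device.getEventStatus( event ) == vk::Result::eEventSet )
    {
      device.resetEvent( event );  // return it to the unsignaled state
    }
    vk::QueryPoolCreateInfo qpInfo{};
    qpInfo.setQueryType( vk::QueryType::eTimestamp ).setQueryCount( 2 );
    vk::UniqueQueryPool queryPool = device.createQueryPoolUnique( qpInfo );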
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + ArrayProxy const & data, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
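    // Illustrative sketch for the templated getQueryPoolResults overload declared above, reading
    // two 64-bit timestamps (assumes vk::Device device and vk::QueryPool queryPool; exceptions enabled).
    auto timestamps = device.getQueryPoolResults<uint64_t>(
      queryPool, 0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ),
      vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );  // ResultValue<std::vector<uint64_t>>
    if ( timestamps.result == vk::Result::eSuccess )
    {
      uint64_t elapsedTicks = timestamps.value[1] - timestamps.value[0];  // ticks between the two queries
    }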
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
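    // Illustrative sketch for the buffer and buffer-view wrappers above (assumes vk::Device device;
    // a texel-buffer view requires a buffer created with a *TexelBuffer usage bit; exceptions enabled).
    vk::BufferCreateInfo bufferInfo{};
    bufferInfo.setSize( 4096 ).setUsage( vk::BufferUsageFlagBits::eUniformTexelBuffer );
    vk::UniqueBuffer buffer = device.createBufferUnique( bufferInfo );
    vk::BufferViewCreateInfo viewInfo{};
    viewInfo.setBuffer( *buffer ).setFormat( vk::Format::eR32Sfloat ).setRange( VK_WHOLE_SIZE );
    vk::UniqueBufferView texelView = device.createBufferViewUnique( viewInfo );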
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( + VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator + 
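    // Illustrative sketch of getImageSubresourceLayout as declared above, typically used with
    // linear-tiled images (assumes vk::Device device and a linear-tiled vk::Image linearImage).
    vk::SubresourceLayout layout = device.getImageSubresourceLayout(
      linearImage, vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } );
    // layout.offset and layout.rowPitch describe where each row of the subresource lives in memory.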
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + 
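    // Illustrative sketch for createShaderModuleUnique as declared above (assumes vk::Device device
    // and a std::vector<uint32_t> spirv holding valid SPIR-V words; exceptions enabled).
    vk::ShaderModuleCreateInfo smInfo{};
    smInfo.setCodeSize( spirv.size() * sizeof( uint32_t ) ).setPCode( spirv.data() );  // size is in bytes
    vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( smInfo );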
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mergePipelineCaches( 
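    // Illustrative sketch for getPipelineCacheData as declared above: fetch the cache blob so it can
    // be persisted and fed back into a new cache on the next run (assumes vk::Device device and
    // vk::PipelineCache pipelineCache; exceptions enabled).
    std::vector<uint8_t> blob = device.getPipelineCacheData( pipelineCache );  // handles the size query internally
    vk::PipelineCacheCreateInfo cacheInfo{};
    cacheInfo.setInitialDataSize( blob.size() ).setPInitialData( blob.data() );
    vk::UniquePipelineCache reloadedCache = device.createPipelineCacheUnique( cacheInfo );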
VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const 
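    // Illustrative sketch for the single-pipeline convenience overload declared above; unlike most
    // wrappers it returns a ResultValue so the creation result can be inspected alongside the handle
    // (assumes vk::Device device, vk::PipelineCache pipelineCache and a filled-in
    // vk::GraphicsPipelineCreateInfo graphicsInfo; exceptions enabled).
    auto rv = device.createGraphicsPipelineUnique( pipelineCache, graphicsInfo );  // ResultValue<vk::UniquePipeline>
    if ( rv.result == vk::Result::eSuccess )
    {
      vk::UniquePipeline pipeline = std::move( rv.value );  // take ownership of the compiled pipeline
    }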
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( 
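    // Illustrative sketch for createComputePipeline as declared above (assumes vk::Device device,
    // vk::PipelineLayout computeLayout and vk::ShaderModule computeShader; exceptions enabled).
    vk::ComputePipelineCreateInfo computeInfo{};
    computeInfo.setStage( vk::PipelineShaderStageCreateInfo{ {}, vk::ShaderStageFlagBits::eCompute, computeShader, "main" } )
               .setLayout( computeLayout );
    auto compute = device.createComputePipeline( vk::PipelineCache{}, computeInfo );  // ResultValue<vk::Pipeline>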
VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator + 
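    // Illustrative sketch for the pipeline-layout and sampler wrappers declared above (assumes
    // vk::Device device and vk::DescriptorSetLayout setLayout; exceptions enabled).
    vk::PipelineLayoutCreateInfo layoutInfo{};
    layoutInfo.setSetLayouts( setLayout );  // one descriptor-set layout, no push-constant ranges
    vk::UniquePipelineLayout pipelineLayout = device.createPipelineLayoutUnique( layoutInfo );
    vk::SamplerCreateInfo samplerInfo{};
    samplerInfo.setMagFilter( vk::Filter::eLinear ).setMinFilter( vk::Filter::eLinear );
    vk::UniqueSampler sampler = device.createSamplerUnique( samplerInfo );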
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( 
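    // Illustrative sketch for createDescriptorSetLayoutUnique as declared above: one uniform buffer
    // visible to the vertex stage (assumes vk::Device device; exceptions enabled).
    vk::DescriptorSetLayoutBinding binding{};
    binding.setBinding( 0 )
           .setDescriptorType( vk::DescriptorType::eUniformBuffer )
           .setDescriptorCount( 1 )
           .setStageFlags( vk::ShaderStageFlagBits::eVertex );
    vk::DescriptorSetLayoutCreateInfo dslInfo{};
    dslInfo.setBindings( binding );
    vk::UniqueDescriptorSetLayout setLayout = device.createDescriptorSetLayoutUnique( dslInfo );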
VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
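    // Illustrative sketch for the descriptor-pool wrappers above; resetDescriptorPool recycles every
    // set allocated from the pool in one call (assumes vk::Device device; exceptions enabled).
    vk::DescriptorPoolSize poolSize{ vk::DescriptorType::eUniformBuffer, 16 };
    vk::DescriptorPoolCreateInfo poolInfo{};
    poolInfo.setMaxSets( 16 ).setPoolSizes( poolSize );
    vk::UniqueDescriptorPool descriptorPool = device.createDescriptorPoolUnique( poolInfo );
    // ... allocate and use descriptor sets ...
    device.resetDescriptorPool( *descriptorPool );  // the flags parameter defaults to {}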
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DescriptorSetAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename B = DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSets( ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
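    // Illustrative sketch for allocateDescriptorSets / updateDescriptorSets as declared above
    // (assumes vk::Device device, vk::DescriptorPool pool, vk::DescriptorSetLayout setLayout and
    // vk::Buffer uniformBuffer; exceptions enabled).
    vk::DescriptorSetAllocateInfo allocInfo{};
    allocInfo.setDescriptorPool( pool ).setSetLayouts( setLayout );
    std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
    vk::DescriptorBufferInfo bufferInfo{ uniformBuffer, 0, VK_WHOLE_SIZE };
    vk::WriteDescriptorSet write{};
    write.setDstSet( sets[0] )
         .setDstBinding( 0 )
         .setDescriptorType( vk::DescriptorType::eUniformBuffer )
         .setBufferInfo( bufferInfo );
    device.updateDescriptorSets( write, nullptr );  // one write, no copies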
template + VULKAN_HPP_NODISCARD Result + createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const 
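    // Illustrative sketch for createFramebufferUnique as declared above (assumes vk::Device device,
    // vk::RenderPass renderPass, vk::ImageView colorView and a vk::Extent2D extent; exceptions enabled).
    vk::FramebufferCreateInfo fbInfo{};
    fbInfo.setRenderPass( renderPass )
          .setAttachments( colorView )  // single color attachment
          .setWidth( extent.width )
          .setHeight( extent.height )
          .setLayers( 1 );
    vk::UniqueFramebuffer framebuffer = device.createFramebufferUnique( fbInfo );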
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
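    // Illustrative sketch for getRenderAreaGranularity and createCommandPoolUnique as declared above
    // (assumes vk::Device device, vk::RenderPass renderPass and a valid queueFamilyIndex).
    vk::Extent2D granularity = device.getRenderAreaGranularity( renderPass );  // preferred render-area alignment
    vk::CommandPoolCreateInfo cmdPoolInfo{ vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex };
    vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( cmdPoolInfo );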
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CommandBufferAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename B = CommandBufferAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d 
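    // Illustrative sketch for allocateCommandBuffersUnique / resetCommandPool as declared above
    // (assumes vk::Device device and vk::CommandPool commandPool; exceptions enabled).
    vk::CommandBufferAllocateInfo cbInfo{ commandPool, vk::CommandBufferLevel::ePrimary, 2 };
    auto commandBuffers = device.allocateCommandBuffersUnique( cbInfo );  // std::vector<vk::UniqueCommandBuffer>
    // ... record and submit ...
    device.resetCommandPool( commandPool );  // the flags parameter defaults to {}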
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; 
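    // Illustrative sketch for the Vulkan 1.1 *MemoryRequirements2 wrappers declared above, using the
    // StructureChain overload to also query dedicated-allocation preferences (assumes vk::Device
    // device, vk::Buffer buffer and vk::DeviceMemory memory; exceptions enabled).
    auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
      vk::BufferMemoryRequirementsInfo2{ buffer } );
    const vk::MemoryRequirements & reqs2          = chain.get<vk::MemoryRequirements2>().memoryRequirements;
    vk::Bool32                     preferDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
    device.bindBufferMemory2( vk::BindBufferMemoryInfo{ buffer, memory, 0 } );  // array form of vkBindBufferMemory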
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(
+      const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2(
+      const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2(
+      const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
+      uint32_t *                                                       pSparseMemoryRequirementCount,
+      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *           pSparseMemoryRequirements,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator =
+                std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
+              typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD
+      std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+      getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator =
+                std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>,
+              typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+              typename B        = SparseImageMemoryRequirements2Allocator,
+              typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
+                                      int>::type = 0>
+    VULKAN_HPP_NODISCARD
+      std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+      getImageSparseMemoryRequirements2(
+        const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
+        SparseImageMemoryRequirements2Allocator &                        sparseImageMemoryRequirements2Allocator,
+        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
+                          VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
+                    VULKAN_HPP_NAMESPACE::Queue *                  pQueue,
+                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
+      getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
+                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(
+      const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *                 pYcbcrConversion,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+      typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+      createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
+                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
+      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+      createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+
Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupport( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_2 === + + template + VULKAN_HPP_NODISCARD Result + createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result + getSemaphoreCounterValue( 
VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_3 === + + template + VULKAN_HPP_NODISCARD Result + createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createPrivateDataSlot( const 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPrivateDataSlot( + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint64_t + getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 
getBufferMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result + createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ImageAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + ImageAllocator & imageAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue + acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getGroupPresentCapabilitiesKHR( Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue + acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_display_swapchain === + + template + VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( + uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SwapchainKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType>::type + createSharedSwapchainsKHR( ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = SwapchainKHRAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( + ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_debug_marker === + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD Result + createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( 
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pVideoSessionMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename VideoGetMemoryPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = VideoGetMemoryPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t videoSessionBindMemoryCount, + const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + ArrayProxy const & videoSessionBindMemories, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createVideoSessionParametersKHRUnique( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & 
createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NVX_binary_import === + + template + VULKAN_HPP_NODISCARD Result + createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createCuFunctionNVX( const 
VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NVX_image_view_handle === + + template + uint32_t + getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint32_t + getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_shader_info === + + template + VULKAN_HPP_NODISCARD Result + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD Result + getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t 
remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance1 === + + template + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD Result + getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + template + VULKAN_HPP_NODISCARD Result + getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + template + VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
typename ResultValueType::type importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + template + VULKAN_HPP_NODISCARD Result + importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_descriptor_update_template === + + template + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDescriptorUpdateTemplateKHRUnique( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate 
descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_display_control === + + template + VULKAN_HPP_NODISCARD Result + displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const 
VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_GOOGLE_display_timing === + + template + VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PastPresentationTimingGOOGLEAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PastPresentationTimingGOOGLEAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_hdr_metadata === + + template + void setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_create_renderpass2 === + + template + VULKAN_HPP_NODISCARD Result + createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_shared_presentable_image === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + template + VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + template + VULKAN_HPP_NODISCARD Result + importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD Result + acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + template + VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( + const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_get_memory_requirements2 === + + template + void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = 
SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_acceleration_structure === + + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result buildAccelerationStructuresKHR( 
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( + uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + ArrayProxy const & data, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
typename ResultValueType::type + writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceAddress getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_sampler_ycbcr_conversion === + + template + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( + const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createSamplerYcbcrConversionKHRUnique( const 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversionKHR( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_bind_memory2 === + + template + VULKAN_HPP_NODISCARD Result + bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2KHR( ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_image_drm_format_modifier === + + template + VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD Result + createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + 
createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyValidationCacheEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * 
pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type 
bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, 
+ uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + template + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupportKHR( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== 
VK_EXT_external_memory_host === + + template + VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( + uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Uint64_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_timeline_semaphore === + + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_INTEL_performance_query === + + template + VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + acquirePerformanceConfigurationINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + acquirePerformanceConfigurationINTELUnique( + const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releasePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( + VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * 
pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_AMD_display_native_hdr === + + template + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_buffer_device_address === + + template + DeviceAddress + getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_present_wait === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & 
surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + + template + DeviceAddress + getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceAddress + getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_host_query_reset === + + template + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_deferred_host_operations === + + template + VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createDeferredOperationKHR( Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + createDeferredOperationKHRUnique( Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDeferredOperationKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result + getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD Result + deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_pipeline_executable_properties === + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineExecutablePropertiesKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutablePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + 
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineExecutableStatisticKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableStatisticsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PipelineExecutableInternalRepresentationKHRAllocator = + std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineExecutableInternalRepresentationKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_device_generated_commands === + + template + void getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectCommandsLayoutNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_private_data === + + template + VULKAN_HPP_NODISCARD Result + createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot 
privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPrivateDataSlotEXT( + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint64_t + getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_ray_tracing_pipeline === + + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B = PipelineAllocator, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t 
firstGroup, + uint32_t groupCount, + ArrayProxy const & data, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + Dispatch const & d + VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + template + VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + template + VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + template + VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( + const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setBufferCollectionImageConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionImageConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result setBufferCollectionBufferConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionBufferConstraintsFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + 
VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + template + VULKAN_HPP_NODISCARD Result getSubpassShadingMaxWorkgroupSizeHUAWEI( + VULKAN_HPP_NAMESPACE::RenderPass renderpass, + VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD ResultValue + getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_external_memory_rdma === + + template + VULKAN_HPP_NODISCARD Result + getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_pageable_device_local_memory === + + template + void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + float priority, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance4 === + + template + void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_device != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_device == VK_NULL_HANDLE; + } + + private: + VkDevice m_device = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Device is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DisplayModeKHR + { + public: + using CType = VkDisplayModeKHR; + using NativeType = VkDisplayModeKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default; + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT + : m_displayModeKHR( displayModeKHR ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = displayModeKHR; + return *this; + } +#endif + + DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeKHR const & ) const = default; +#else + bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == rhs.m_displayModeKHR; + } + + bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != rhs.m_displayModeKHR; + } + + bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR < rhs.m_displayModeKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayModeKHR m_displayModeKHR = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeKHR is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDevice = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + using NativeType = VkPhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + VULKAN_HPP_CONSTEXPR PhysicalDevice() = default; + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( physicalDevice ) + {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = physicalDevice; + return *this; + } +#endif + + PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice const & ) const = default; +#else + bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == rhs.m_physicalDevice; + } + + bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != rhs.m_physicalDevice; + } + + bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice < rhs.m_physicalDevice; + } +#endif + + //=== VK_VERSION_1_0 === + + template + void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties + getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( + const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = LayerPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + 
VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + 
VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename SparseImageFormatProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( + const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_3 === + + template + VULKAN_HPP_NODISCARD Result + getToolProperties( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PhysicalDeviceToolPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceToolPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_surface === + + template + VULKAN_HPP_NODISCARD Result + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SurfaceFormatKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = Rect2DAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Rect2DAllocator & rect2DAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== 
VK_KHR_display === + + template + VULKAN_HPP_NODISCARD Result + getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPlanePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( + uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + DisplayKHRAllocator & displayKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayModePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 
getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD Result + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = VideoFormatPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_external_memory_capabilities === + + template + 
VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_physical_device_properties2 === + + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename QueueFamilyProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void + getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD StructureChain + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename 
SparseImageFormatProperties2Allocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_memory_capabilities === + + template + void getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_semaphore_capabilities === + + template + void getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + template + VULKAN_HPP_NODISCARD Result + acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireXlibDisplayEXT( Display & dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type getRandROutputDisplayEXT( + Display & 
dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type + getRandROutputDisplayEXTUnique( Display & dpy, + RROutput rrOutput, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_fence_capabilities === + + template + void getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::pair, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PerformanceCounterKHRAllocator, + typename B2 = PerformanceCounterDescriptionKHRAllocator, + typename std::enable_if::value && + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::pair, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( + uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * 
pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_surface_capabilities2 === + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = SurfaceFormat2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD Result + getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayProperties2KHR( DisplayProperties2KHRAllocator & 
displayProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayPlaneProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = DisplayModeProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS + typename ResultValueType::type + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_sample_locations === + + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( + uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = TimeDomainEXTAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_fragment_shading_rate === + + template + VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( + uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceFragmentShadingRateKHRAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getFragmentShadingRatesKHR( + PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_EXT_tooling_info === + + template + VULKAN_HPP_NODISCARD Result + getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PhysicalDeviceToolPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceToolPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_cooperative_matrix === + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( + 
uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = CooperativeMatrixPropertiesNVAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_coverage_reduction_mode === + + template + VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getSupportedFramebufferMixedSamplesCombinationsNV( + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = FramebufferMixedSamplesCombinationNVAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== 
VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + getDrmDisplayEXTUnique( int32_t drmFd, + uint32_t connectorId, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + getWinrtDisplayNVUnique( uint32_t deviceRelativeId, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + 
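The per-platform presentation-support queries declared in this header (Xlib, Xcb, Wayland, Win32, DirectFB above, and the QNX variant that follows) all share the same shape: given a queue-family index and a native connection handle, they return a Bool32 and never throw. A minimal usage sketch, not part of the patch, assuming a Wayland build where VK_USE_PLATFORM_WAYLAND_KHR is defined, VK_KHR_wayland_surface is enabled on the instance, and wlDisplay stands in for the application's wl_display:

// Hedged sketch: choose a queue family that supports both graphics work and Wayland
// presentation, using the enhanced-mode reference overload declared above.
#include <optional>
#include <vector>
#include <wayland-client.h>          // assumption: building for Wayland
#define VK_USE_PLATFORM_WAYLAND_KHR  // assumption: normally set by the build system instead
#include <vulkan/vulkan.hpp>

std::optional<uint32_t> findPresentableGraphicsQueue( vk::PhysicalDevice physicalDevice,
                                                      struct wl_display & wlDisplay )
{
  std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < families.size(); ++i )
  {
    bool graphics = static_cast<bool>( families[i].queueFlags & vk::QueueFlagBits::eGraphics );
    // Returns vk::Bool32 and is noexcept, so no Result or exception handling is needed here.
    bool present = physicalDevice.getWaylandPresentationSupportKHR( i, wlDisplay ) == VK_TRUE;
    if ( graphics && present )
      return i;
  }
  return std::nullopt;  // no queue family can present to this display
}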
template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevice is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( + "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Instance; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDebugReportCallbackEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueSurfaceKHR = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Instance + { + public: + using CType = VkInstance; + using NativeType = VkInstance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: + VULKAN_HPP_CONSTEXPR Instance() = default; + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT + { + m_instance = instance; + return *this; + } +#endif + + Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_instance = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Instance const & ) const = default; +#else + bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance == rhs.m_instance; + } + + bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance != rhs.m_instance; + } + + bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
m_instance < rhs.m_instance; + } +#endif + + //=== VK_VERSION_1_0 === + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction + getProcAddr( const char * pName, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction + getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( + uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_surface === + + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif 
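The enumeratePhysicalDevices overloads declared above come in a raw pointer/count form and an enhanced form that returns a std::vector. A minimal sketch of the enhanced form, not part of the patch, assuming the default exceptions-enabled configuration (errors surface as vk::SystemError); preferring a discrete GPU is just an illustrative policy:

// Hedged sketch: enumerate physical devices and prefer a discrete GPU if one is present.
#include <vector>
#include <vulkan/vulkan.hpp>

vk::PhysicalDevice pickPhysicalDevice( vk::Instance instance )
{
  // Enhanced-mode overload: returns std::vector<vk::PhysicalDevice>, throws on failure.
  std::vector<vk::PhysicalDevice> devices = instance.enumeratePhysicalDevices();
  if ( devices.empty() )
    return vk::PhysicalDevice{};  // null handle: no Vulkan-capable device present
  for ( vk::PhysicalDevice device : devices )
  {
    if ( device.getProperties().deviceType == vk::PhysicalDeviceType::eDiscreteGpu )
      return device;
  }
  return devices.front();  // otherwise fall back to the first enumerated device
}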
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( + const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + VULKAN_HPP_NODISCARD Result + createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + VULKAN_HPP_NODISCARD Result + createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + VULKAN_HPP_NODISCARD Result + createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + template + VULKAN_HPP_NODISCARD Result + createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + VULKAN_HPP_NODISCARD Result + createWin32SurfaceKHR( 
const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + template + VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( + const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
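The VK_EXT_debug_report declarations above are typically used once at startup to hook validation output (the extension is deprecated in favour of VK_EXT_debug_utils, declared further below, but the calling pattern is the same). A minimal sketch, not part of the patch, assuming the instance was created with VK_EXT_debug_report enabled; because the loader does not statically export these entry points, a vk::DispatchLoaderDynamic is passed explicitly:

// Hedged sketch: install a debug report callback that logs errors and warnings to stderr.
#include <iostream>
#include <vulkan/vulkan.hpp>

static VKAPI_ATTR VkBool32 VKAPI_CALL reportCallback( VkDebugReportFlagsEXT /*flags*/,
                                                      VkDebugReportObjectTypeEXT /*objectType*/,
                                                      uint64_t /*object*/, size_t /*location*/,
                                                      int32_t /*messageCode*/, const char * layerPrefix,
                                                      const char * message, void * /*pUserData*/ )
{
  std::cerr << "[" << layerPrefix << "] " << message << std::endl;
  return VK_FALSE;  // do not abort the call that triggered the report
}

vk::DebugReportCallbackEXT installDebugReport( vk::Instance instance )
{
  // Load the extension entry points for this instance.
  vk::DispatchLoaderDynamic dldi( instance, vkGetInstanceProcAddr );
  vk::DebugReportCallbackCreateInfoEXT createInfo(
    vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &reportCallback );
  // Enhanced-mode overload declared above: throws vk::SystemError on failure.
  return instance.createDebugReportCallbackEXT( createInfo, nullptr, dldi );
}

The matching destroyDebugReportCallbackEXT overload declared above takes the same dispatcher when the callback is torn down at shutdown.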
template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + template + VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createStreamDescriptorSurfaceGGPUnique( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_GGP*/ + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + template + VULKAN_HPP_NODISCARD Result + createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( + uint32_t * 
pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD Result + createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD Result + createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + 
VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE + typename ResultValueType>::type + createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + template + VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + template + VULKAN_HPP_NODISCARD Result + createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + template + VULKAN_HPP_NODISCARD Result + createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== 
VK_EXT_directfb_surface === + + template + VULKAN_HPP_NODISCARD Result + createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template + VULKAN_HPP_NODISCARD Result + createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), + "handle and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Instance is not nothrow_move_constructible!" ); + + template <> + struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." 
) cpp_type + { + using type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + //=== VK_VERSION_1_0 === + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + using UniqueInstance = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + template + VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createInstance( + const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType>::type + createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( + const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result + enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B = LayerPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceLayerProperties( 
LayerPropertiesAllocator & layerPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( + uint32_t * pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type + enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_hash.hpp b/Externals/Vulkan/Include/vulkan/vulkan_hash.hpp new file mode 100644 index 000000000000..f5e13e58ceb5 --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_hash.hpp @@ -0,0 +1,13191 @@ +// Copyright 2015-2022 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_HASH_HPP +#define VULKAN_HASH_HPP + +#include + +namespace std +{ + //======================================= + //=== HASH structures for Flags types === + //======================================= + + template + struct hash> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Flags const & flags ) const VULKAN_HPP_NOEXCEPT + { + return std::hash::type>{}( + static_cast::type>( flags ) ); + } + }; + + //=================================== + //=== HASH structures for handles === + //=================================== + + //=== VK_VERSION_1_0 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Instance const & instance ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( instance ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( physicalDevice ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Device const & device ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( device ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Queue const & queue ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( queue ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( deviceMemory ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( fence ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( semaphore ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Event const & event ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( event ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( queryPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT 
+ { + return std::hash{}( static_cast( buffer ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( bufferView ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Image const & image ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( image ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( imageView ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( shaderModule ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineCache ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipeline ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineLayout ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( sampler ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSet ) ); + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSetLayout ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( framebuffer ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( renderPass ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( commandPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( commandBuffer ) ); + } + }; + + //=== VK_VERSION_1_1 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( samplerYcbcrConversion ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( descriptorUpdateTemplate ) ); + } + }; + + //=== VK_VERSION_1_3 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlot const & privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( privateDataSlot ) ); + } + }; + + //=== VK_KHR_surface === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( surfaceKHR ) ); + } + }; + + //=== VK_KHR_swapchain === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( swapchainKHR ) ); + } + }; + + //=== VK_KHR_display === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( displayKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( displayModeKHR ) ); + } + }; + + //=== VK_EXT_debug_report === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugReportCallbackEXT ) ); + } + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( videoSessionKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( videoSessionParametersKHR ) ); + } + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NVX_binary_import === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cuModuleNVX ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cuFunctionNVX ) ); + } + }; + + //=== VK_EXT_debug_utils === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugUtilsMessengerEXT ) ); + } + }; + + //=== VK_KHR_acceleration_structure === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( accelerationStructureKHR ) ); + } + }; + + //=== VK_EXT_validation_cache === + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( validationCacheEXT ) ); + } + }; + + //=== VK_NV_ray_tracing === + + 
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( accelerationStructureNV ) ); + } + }; + + //=== VK_INTEL_performance_query === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL ) + const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( performanceConfigurationINTEL ) ); + } + }; + + //=== VK_KHR_deferred_host_operations === + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( deferredOperationKHR ) ); + } + }; + + //=== VK_NV_device_generated_commands === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( indirectCommandsLayoutNV ) ); + } + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & bufferCollectionFUCHSIA ) const + VULKAN_HPP_NOEXCEPT + { + return std::hash{}( + static_cast( bufferCollectionFUCHSIA ) ); + } + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if 14 <= VULKAN_HPP_CPP_VERSION + //====================================== + //=== HASH structures for structures === + //====================================== + +# if !defined( VULKAN_HPP_HASH_COMBINE ) +# define VULKAN_HPP_HASH_COMBINE( seed, value ) \ + seed ^= std::hash::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 ) +# endif + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AabbPositionsKHR const & aabbPositionsKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minX ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minY ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minZ ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxX ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxY ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxZ ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const & + accelerationStructureBuildRangeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.transformOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const & + accelerationStructureBuildSizesInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.accelerationStructureSize ); + VULKAN_HPP_HASH_COMBINE( seed, 
accelerationStructureBuildSizesInfoKHR.updateScratchSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.buildScratchSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & + accelerationStructureCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.createFlags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.offset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.size ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & geometryTrianglesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexStride ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexFormat ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & geometryAABBNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.aabbData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.numAABBs ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.stride ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometryDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.triangles ); + VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.aabbs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryNV const & geometryNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometryType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometry ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + 
VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & accelerationStructureInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.geometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pGeometries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & + accelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.compactedSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.info ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const & + accelerationStructureDeviceAddressInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformMatrixKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < 3; ++i ) + { + for ( size_t j = 0; j < 4; ++j ) + { + VULKAN_HPP_HASH_COMBINE( seed, transformMatrixKHR.matrix[i][j] ); + } + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & + accelerationStructureInstanceKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.transform ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.mask ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.accelerationStructureReference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & + accelerationStructureMatrixMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT0 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT1 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.mask ); + VULKAN_HPP_HASH_COMBINE( seed, + 
accelerationStructureMatrixMotionInstanceNV.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.accelerationStructureReference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & + accelerationStructureMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const & + accelerationStructureMotionInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.maxInstances ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SRTDataNV const & sRTDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.a ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.b ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sy ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.c ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvy ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sz ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvz ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qy ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qz ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qw ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tx ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.ty ); + VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tz ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & + accelerationStructureSRTMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT0 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT1 ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.mask ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.accelerationStructureReference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const & + accelerationStructureVersionInfoKHR ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pVersionData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & acquireNextImageInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.timeout ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & acquireProfilingLockInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.timeout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AllocationCallbacks const & allocationCallbacks ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pUserData ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnReallocation ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnFree ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalFree ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComponentMapping const & componentMapping ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.r ); + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.g ); + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.b ); + VULKAN_HPP_HASH_COMBINE( seed, componentMapping.a ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const & + androidHardwareBufferFormatProperties2ANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.format ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrModel ); + 
VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & + androidHardwareBufferFormatPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.format ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & + androidHardwareBufferPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & + androidHardwareBufferUsageANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.androidHardwareBufferUsage ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & androidSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + 
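// --------------------------------------------------------------------------
// [Editorial sketch, not part of the generated header or of this patch.]
// The std::hash specializations in vulkan_hash.hpp exist so Vulkan-Hpp handles
// and, for C++14 and later, Vulkan structs can be used as keys in standard
// unordered containers. A hypothetical consumer could look like the cache
// below; the SamplerCache name and its interface are assumptions for
// illustration only, not Dolphin or Khronos code.

#include <cstddef>
#include <unordered_map>

#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_hash.hpp>

class SamplerCache
{
public:
  explicit SamplerCache( vk::Device device ) : m_device( device ) {}

  vk::Sampler GetOrCreate( vk::SamplerCreateInfo const & info )
  {
    // std::hash<vk::SamplerCreateInfo> comes from vulkan_hash.hpp; it combines
    // every member with VULKAN_HPP_HASH_COMBINE. Keying purely on the hash
    // ignores collisions, so a production cache would also compare the stored
    // create infos before reusing a sampler.
    std::size_t const key = std::hash<vk::SamplerCreateInfo>{}( info );
    auto it = m_samplers.find( key );
    if ( it == m_samplers.end() )
      it = m_samplers.emplace( key, m_device.createSampler( info ) ).first;
    return it->second;
  }

private:
  vk::Device m_device;
  std::unordered_map<std::size_t, vk::Sampler> m_samplers;
};
// --------------------------------------------------------------------------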
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ApplicationInfo const & applicationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.pNext ); + for ( const char * p = applicationInfo.pApplicationName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.applicationVersion ); + for ( const char * p = applicationInfo.pEngineName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.engineVersion ); + VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.apiVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription const & attachmentDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.flags ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.format ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.samples ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.loadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.storeOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilLoadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilStoreOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.initialLayout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.finalLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & attachmentDescription2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.format ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.samples ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.loadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.storeOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilLoadOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilStoreOp ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.initialLayout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.finalLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & + attachmentDescriptionStencilLayout ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilInitialLayout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilFinalLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.attachment ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.layout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::AttachmentReference2 const & attachmentReference2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.attachment ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.layout ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.aspectMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & + attachmentReferenceStencilLayout ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.stencilLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const & attachmentSampleCountInfoAMD ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pColorAttachmentSamples ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.depthStencilAttachmentSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Extent2D const & extent2D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, extent2D.width ); + VULKAN_HPP_HASH_COMBINE( seed, extent2D.height ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SampleLocationEXT const & sampleLocationEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.x ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.y ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsPerPixel ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationGridSize ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const & attachmentSampleLocationsEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.attachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BaseInStructure const & baseInStructure ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
baseInStructure.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BaseOutStructure const & baseOutStructure ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.sType ); + VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & + bindAccelerationStructureMemoryInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.deviceIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & bindBufferMemoryDeviceGroupInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.deviceIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & bindBufferMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memoryOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Offset2D const & offset2D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, offset2D.x ); + VULKAN_HPP_HASH_COMBINE( seed, offset2D.y ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Rect2D const & rect2D ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rect2D.offset ); + VULKAN_HPP_HASH_COMBINE( seed, rect2D.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & bindImageMemoryDeviceGroupInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.deviceIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pDeviceIndices ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.splitInstanceBindRegionCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
bindImageMemoryDeviceGroupInfo.pSplitInstanceBindRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & bindImageMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memoryOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & bindImageMemorySwapchainInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.imageIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & bindImagePlaneMemoryInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const & + bindIndexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.indexType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const & + bindShaderGroupIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindShaderGroupIndirectCommandNV.groupIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseMemoryBind const & sparseMemoryBind ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.resourceOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.size ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memory ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const & sparseBufferMemoryBindInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.bindCount ); + VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.pBinds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const & sparseImageOpaqueMemoryBindInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.bindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.aspectMask );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.mipLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.arrayLayer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Offset3D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Offset3D const & offset3D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, offset3D.x );
+      VULKAN_HPP_HASH_COMBINE( seed, offset3D.y );
+      VULKAN_HPP_HASH_COMBINE( seed, offset3D.z );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Extent3D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Extent3D const & extent3D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, extent3D.width );
+      VULKAN_HPP_HASH_COMBINE( seed, extent3D.height );
+      VULKAN_HPP_HASH_COMBINE( seed, extent3D.depth );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const & sparseImageMemoryBind ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.subresource );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.extent );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memoryOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const & sparseImageMemoryBindInfo ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.bindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindSparseInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindSparseInfo const & bindSparseInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.waitSemaphoreCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pWaitSemaphores );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.bufferBindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pBufferBinds );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageOpaqueBindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageOpaqueBinds );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageBindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageBinds );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.signalSemaphoreCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pSignalSemaphores );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const &
+                              bindVertexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.bufferAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.size );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.stride );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresourceLayers ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.aspectMask );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.mipLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.baseArrayLayer );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.layerCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageBlit2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageBlit2 const & imageBlit2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcSubresource );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcOffsets[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstSubresource );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstOffsets[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BlitImageInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BlitImageInfo2 const & blitImageInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImage );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImageLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImage );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImageLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.regionCount );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pRegions );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.filter );
+      return seed;
+    }
+  };
+
+#  if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const &
+                              bufferCollectionBufferCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.collection );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.index );
+      return seed;
+    }
+  };
+#  endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#  if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &
+                              bufferCollectionConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.maxBufferCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForCamping );
+
VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForDedicatedSlack ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForSharedSlack ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & + bufferCollectionCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.collectionToken ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const & + bufferCollectionImageCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.collection ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.index ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceFUCHSIA ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.colorSpace ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const & + bufferCollectionPropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.memoryTypeBits ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.bufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.createInfoIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemPixelFormat ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemColorSpaceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & 
bufferCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.size ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pQueueFamilyIndices ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const & bufferConstraintsInfoFUCHSIA ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.createInfo ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.requiredFormatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.bufferCollectionConstraints ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCopy const & bufferCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCopy2 const & bufferCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & + bufferDeviceAddressCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & bufferDeviceAddressInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferImageCopy const & bufferImageCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageOffset ); + 
VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferImageCopy2 const & bufferImageCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & bufferMemoryBarrier ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.offset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const & bufferMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.offset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & bufferMemoryRequirementsInfo2 ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & + bufferOpaqueCaptureAddressCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.opaqueCaptureAddress ); + return seed; + } + }; + + template 
<> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.range ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & calibratedTimestampInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.timeDomain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CheckpointData2NV const & checkpointData2NV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.stage ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pCheckpointMarker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & checkpointDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.stage ); + VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pCheckpointMarker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & clearDepthStencilValue ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.depth ); + VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.stencil ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearRect const & clearRect ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clearRect.rect ); + VULKAN_HPP_HASH_COMBINE( seed, clearRect.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, clearRect.layerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelY ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.sample ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleCount ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleLocationCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.pSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandPool ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.level ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandBufferCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.subpass ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.framebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.occlusionQueryEnable ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.queryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pipelineStatistics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pInheritanceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & + commandBufferInheritanceConditionalRenderingInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.conditionalRenderingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & + commandBufferInheritanceRenderPassTransformInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.transform ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.renderArea ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const & + commandBufferInheritanceRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
commandBufferInheritanceRenderingInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.stencilAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.rasterizationSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Viewport const & viewport ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viewport.x ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.y ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.width ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.height ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.minDepth ); + VULKAN_HPP_HASH_COMBINE( seed, viewport.maxDepth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const & + commandBufferInheritanceViewportScissorInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportScissor2D ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportDepthCount ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pViewportDepths ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const & commandBufferSubmitInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.commandBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & commandPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.queueFamilyIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationMapEntry const & specializationMapEntry ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.constantID ); + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.offset ); + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SpecializationInfo const & 
specializationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.mapEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pMapEntries ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & pipelineShaderStageCreateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.stage ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.module ); + for ( const char * p = pipelineShaderStageCreateInfo.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pSpecializationInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & computePipelineCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.stage ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & + conditionalRenderingBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ConformanceVersion const & conformanceVersion ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.major ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.minor ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.subminor ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.patch ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & cooperativeMatrixPropertiesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.MSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.NSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.KSize ); + 
VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.DType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.scope ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const & + copyAccelerationStructureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.src ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.dst ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const & copyBufferInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const & copyBufferToImageInfo2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const & copyCommandTransformInfoQCOM ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.transform ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & copyDescriptorSet ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcSet ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcBinding ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstSet ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.descriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::ImageCopy2 const & imageCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageInfo2 const & copyImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const & copyImageToBufferInfo2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.module ); + for ( const char * p = cuFunctionCreateInfoNVX.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const & cuLaunchInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.function ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sharedMemBytes ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.paramCount ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pParams ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.extraCount ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pExtras ); + return seed; + } + }; + + template <> + struct hash + { + 
std::size_t + operator()( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & cuModuleCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pData ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.waitSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.signalSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pSignalSemaphoreValues ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & debugMarkerMarkerInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.pNext ); + for ( const char * p = debugMarkerMarkerInfoEXT.pMarkerName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.color[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & debugMarkerObjectNameInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.object ); + for ( const char * p = debugMarkerObjectNameInfoEXT.pObjectName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & debugMarkerObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.object ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pTag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & + debugReportCallbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pfnCallback ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & debugUtilsLabelEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.pNext ); + for ( const char * p = debugUtilsLabelEXT.pLabelName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.color[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & debugUtilsObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectHandle ); + for ( const char * p = debugUtilsObjectNameInfoEXT.pObjectName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & + debugUtilsMessengerCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.flags ); + for ( const char * p = debugUtilsMessengerCallbackDataEXT.pMessageIdName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.messageIdNumber ); + for ( const char * p = debugUtilsMessengerCallbackDataEXT.pMessage; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.queueLabelCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pQueueLabels ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.cmdBufLabelCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pCmdBufLabels ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.objectCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pObjects ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & + debugUtilsMessengerCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageSeverity ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pfnUserCallback ); + VULKAN_HPP_HASH_COMBINE( seed, 
debugUtilsMessengerCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & debugUtilsObjectTagInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectHandle ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pTag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & + dedicatedAllocationBufferCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.dedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & + dedicatedAllocationImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.dedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & + dedicatedAllocationMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.image ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrier2 const & memoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstAccessMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & imageSubresourceRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseMipLevel ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.levelCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.layerCount ); + return seed; + } + }; + + template <> + struct hash + { + 
std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const & imageMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DependencyInfo const & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.dependencyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.memoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pMemoryBarriers ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.bufferMemoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pBufferMemoryBarriers ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.imageMemoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pImageMemoryBarriers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & descriptorBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.range ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorImageInfo const & descriptorImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.sampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.type ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.descriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & descriptorPoolCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.maxSets ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.poolSizeCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
descriptorPoolCreateInfo.pPoolSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const & + descriptorPoolInlineUniformBlockCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.maxInlineUniformBlockBindings ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & descriptorSetAllocateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorPool ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pSetLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const & descriptorSetLayoutBinding ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.binding ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.pImmutableSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & + descriptorSetLayoutBindingFlagsCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.bindingCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pBindingFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & descriptorSetLayoutCreateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.bindingCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pBindings ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & descriptorSetLayoutSupport ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.supported ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & + descriptorSetVariableDescriptorCountAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pDescriptorCounts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & + descriptorSetVariableDescriptorCountLayoutSupport ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.maxVariableDescriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const & descriptorUpdateTemplateEntry ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.offset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.stride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & + descriptorUpdateTemplateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorUpdateEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pDescriptorUpdateEntries ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.templateType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorSetLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.set ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pCreateInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & 
deviceQueueCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pQueuePriorities ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & physicalDeviceFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.robustBufferAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fullDrawIndexUint32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.imageCubeArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.independentBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.geometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.tessellationShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sampleRateShading ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.dualSrcBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.logicOp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiDrawIndirect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.drawIndirectFirstInstance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthClamp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fillModeNonSolid ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBounds ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.wideLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.largePoints ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.alphaToOne ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiViewport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.samplerAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionETC2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionASTC_LDR ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionBC ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.occlusionQueryPrecise ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.pipelineStatisticsQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.vertexPipelineStoresAndAtomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fragmentStoresAndAtomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderTessellationAndGeometryPointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderImageGatherExtended ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageExtendedFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageMultisample ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageReadWithoutFormat ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageWriteWithoutFormat ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderClipDistance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderCullDistance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceResidency ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceMinLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage2D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage3D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency2Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency4Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency8Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency16Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyAliased ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.variableMultisampleRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.inheritedQueries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & deviceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.queueCreateInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pQueueCreateInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledLayerCount ); + for ( size_t i = 0; i < deviceCreateInfo.enabledLayerCount; ++i ) + { + for ( const char * p = deviceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledExtensionCount ); + for ( size_t i = 0; i < deviceCreateInfo.enabledExtensionCount; ++i ) + { + for ( const char * p = deviceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pEnabledFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const & + deviceDeviceMemoryReportCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pfnUserCallback ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const & + deviceDiagnosticsConfigCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.deviceEvent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.resourceDeviceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.memoryDeviceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & + deviceGroupCommandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & deviceGroupDeviceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.physicalDeviceCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pPhysicalDevices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & + deviceGroupPresentCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.pNext ); + for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.presentMask[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.modes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & deviceGroupPresentInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pDeviceMasks ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.mode ); + return seed; + } + }; + + 
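// Context for the specializations above and below: every VULKAN_HPP_HASH_COMBINE( seed, member )
// call folds one struct member into the running seed.  In Vulkan-Hpp the macro is a boost-style
// hash_combine; the helper below is an illustrative stand-in (hypothetical name
// HashCombineSketch), not the header's actual macro, showing roughly what each call does.
#include <cstddef>
#include <functional>
#include <type_traits>

template <typename T>
void HashCombineSketch( std::size_t & seed, T const & value )
{
  // Same mixing constants as boost::hash_combine: hash the member with std::hash of its
  // decayed type, then mix the result into the seed.
  seed ^= std::hash<typename std::decay<T>::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
}
// Each specialization is then just a memberwise fold over the struct's fields, starting
// from a seed of zero; pointer members are folded in by address, not by pointee contents.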
template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & deviceGroupRenderPassBeginInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceMask ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceRenderAreaCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pDeviceRenderAreas ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & deviceGroupSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pWaitSemaphoreDeviceIndices ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pCommandBufferDeviceMasks ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pSignalSemaphoreDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & + deviceGroupSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.modes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.imageType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.extent ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.mipLevels ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.arrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.samples ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.tiling ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.initialLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const & deviceImageMemoryRequirements ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, 
deviceImageMemoryRequirements.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & + deviceMemoryOpaqueCaptureAddressInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.memory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & + deviceMemoryOverallocationCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.overallocationBehavior ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const & + deviceMemoryReportCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.memoryObjectId ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectHandle ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.heapIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const & devicePrivateDataCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.privateDataSlotRequestCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const & + deviceQueueGlobalPriorityCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.globalPriority ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & deviceQueueInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueIndex ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template <> 
+ struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & directFBSurfaceCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.dfb ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.surface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const & dispatchIndirectCommand ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.x ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.y ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.z ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.displayEvent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & displayModeParametersKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.visibleRegion ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.refreshRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & displayModeCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.parameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const & displayModePropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.displayMode ); + VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.parameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & displayModeProperties2KHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.displayModeProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & + displayNativeHdrSurfaceCapabilitiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
displayNativeHdrSurfaceCapabilitiesAMD.localDimmingSupport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const & displayPlaneCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.supportedAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & displayPlaneCapabilities2KHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.capabilities ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & displayPlaneInfo2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.mode ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.planeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const & displayPlanePropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentDisplay ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentStackIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & displayPlaneProperties2KHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.displayPlaneProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & displayPowerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.powerState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & displayPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.srcRect 
); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.dstRect ); + VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.persistent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const & displayPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.display ); + for ( const char * p = displayPropertiesKHR.displayName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalDimensions ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalResolution ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.planeReorderPossible ); + VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.persistentContent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & displayProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.displayProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & displaySurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.displayMode ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeIndex ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeStackIndex ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.transform ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.globalAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.alphaMode ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const & drawIndexedIndirectCommand ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstIndex ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.vertexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstInstance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCommand const & drawIndirectCommand ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstInstance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const & 
drawMeshTasksIndirectCommandNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.taskCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.firstTask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const & drmFormatModifierProperties2EXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierTilingFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const & drmFormatModifierPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierTilingFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const & + drmFormatModifierPropertiesList2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pDrmFormatModifierProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & + drmFormatModifierPropertiesListEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pDrmFormatModifierProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::EventCreateInfo const & eventCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & exportMemoryAllocateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & exportMemoryAllocateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & exportMemoryWin32HandleInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & exportMemoryWin32HandleInfoNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.dwAccess ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & exportSemaphoreCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & + exportSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( 
seed, exportSemaphoreWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExtensionProperties const & extensionProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.extensionName[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.specVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.externalMemoryFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.compatibleHandleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & externalBufferProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.externalMemoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & externalFenceProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.compatibleHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.externalFenceFeatures ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & externalFormatANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.externalFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & externalImageFormatProperties ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.externalMemoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties const & imageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxExtent ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxMipLevels ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, 
imageFormatProperties.sampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxResourceSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const & externalImageFormatPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.imageFormatProperties ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.externalMemoryFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.compatibleHandleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & externalMemoryBufferCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & externalMemoryImageCreateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & externalMemoryImageCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.compatibleHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.externalSemaphoreFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & fenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & fenceGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, 
fenceGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & fenceGetWin32HandleInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & + filterCubicImageViewImageFormatPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubic ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubicMinmax ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties const & formatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.linearTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.optimalTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.bufferFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::FormatProperties2 const & formatProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.formatProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::FormatProperties3 const & formatProperties3 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.sType ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.linearTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.optimalTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.bufferFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const & + fragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pFragmentShadingRateAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.width ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.height ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.viewFormatCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pViewFormats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & + framebufferAttachmentsCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.attachmentImageInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pAttachmentImageInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & framebufferCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.width ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.height ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.layers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & + framebufferMixedSamplesCombinationNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.coverageReductionMode ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.rasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.depthStencilSamples ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.colorSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const & indirectCommandsStreamNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const & generatedCommandsInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipelineBindPoint ); 
+ VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.streamCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pStreams ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessOffset ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountOffset ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const & + generatedCommandsMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.maxSequencesCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const & vertexInputBindingDescription ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.inputRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const & vertexInputAttributeDescription ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.format ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & + pipelineVertexInputStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexBindingDescriptions ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & + pipelineInputAssemblyStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.topology ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.primitiveRestartEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & + pipelineTessellationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.patchControlPoints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & pipelineViewportStateCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pViewports ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.scissorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pScissors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & + pipelineRasterizationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthClampEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.rasterizerDiscardEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.polygonMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.cullMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.frontFace ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasConstantFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasSlopeFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.lineWidth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & + pipelineMultisampleStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.rasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sampleShadingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.minSampleShading ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pSampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToCoverageEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToOneEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StencilOpState const & stencilOpState ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.failOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.passOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.depthFailOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareOp ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareMask ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.writeMask ); + VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.reference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & + pipelineDepthStencilStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthTestEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthWriteEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthCompareOp ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthBoundsTestEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.stencilTestEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.front ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.back ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.minDepthBounds ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.maxDepthBounds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const & + pipelineColorBlendAttachmentState ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.blendEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcColorBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstColorBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorBlendOp ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcAlphaBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineColorBlendAttachmentState.dstAlphaBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.alphaBlendOp ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorWriteMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & + pipelineColorBlendStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOpEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOp ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pAttachments ); + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.blendConstants[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & pipelineDynamicStateCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.dynamicStateCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pDynamicStates ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & graphicsPipelineCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pVertexInputState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pInputAssemblyState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pTessellationState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pViewportState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pRasterizationState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pMultisampleState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDepthStencilState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pColorBlendState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDynamicState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.subpass ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const & graphicsShaderGroupCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pVertexInputState ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pTessellationState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const & + graphicsPipelineShaderGroupsCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.groupCount ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pGroups ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pipelineCount ); + VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pPipelines ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XYColorEXT const & xYColorEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.x ); + VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.y ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & hdrMetadataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryRed ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryGreen ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryBlue ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.whitePoint ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxLuminance ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.minLuminance ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxContentLightLevel ); + VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxFrameAverageLightLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & headlessSurfaceCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.flags ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & iOSSurfaceCreateInfoMVK ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.sType ); + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.flags ); + VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pView ); + return seed; + } + }; +# endif 
/*VK_USE_PLATFORM_IOS_MVK*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageBlit const & imageBlit ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcSubresource ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcOffsets[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstSubresource ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstOffsets[i] ); + } + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const & + imageFormatConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.imageCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.requiredFormatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sysmemPixelFormat ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.colorSpaceCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pColorSpaces ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const & imageConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.formatConstraintsCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pFormatConstraints ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.bufferCollectionConstraints ); + VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.flags ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCopy const & imageCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout const & subresourceLayout ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.offset ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.size ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.rowPitch ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.arrayPitch ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.depthPitch ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & + imageDrmFormatModifierExplicitCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pPlaneLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & + imageDrmFormatModifierListCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pDrmFormatModifiers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & + imageDrmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.drmFormatModifier ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & imageFormatListCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.viewFormatCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pViewFormats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & imageFormatProperties2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.imageFormatProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & imageMemoryBarrier ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & 
imageMemoryRequirementsInfo2 ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.image ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & + imagePipeSurfaceCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.imagePipeHandle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & + imagePlaneMemoryRequirementsInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageResolve const & imageResolve ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageResolve2 const & imageResolve2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.extent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & + imageSparseMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.image ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & imageStencilUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.stencilUsage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + 
VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.swapchain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & imageViewASTCDecodeModeEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.decodeMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const & imageViewAddressPropertiesNVX ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.viewType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.components ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.subresourceRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & imageViewHandleInfoNVX ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sampler ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const & imageViewMinLodCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.minLod ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & imageViewUsageCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.usage ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + 
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & + importAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.buffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & importFenceFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fd ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & importFenceWin32HandleInfoKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handle ); + VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const & + importMemoryBufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.collection ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.index ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & importMemoryFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.fd ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & importMemoryHostPointerInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, 
importMemoryHostPointerInfoEXT.pHostPointer ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & importMemoryWin32HandleInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handle ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & importMemoryWin32HandleInfoNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const & + importMemoryZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & importSemaphoreFdInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.fd ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & + importSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handle ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template 
<> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const & + importSemaphoreZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.flags ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.zirconHandle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const & indirectCommandsLayoutTokenNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.tokenType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.stream ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.offset ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexBindingUnit ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexDynamicStride ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantPipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantShaderStageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantOffset ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantSize ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indirectStateFlags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indexTypeCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypes ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypeValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & + indirectCommandsLayoutCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.tokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pTokens ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.streamCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pStreamStrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & + initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
initializePerformanceApiInfoINTEL.pUserData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.subpass ); + VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.inputAttachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.aspectMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & instanceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pApplicationInfo ); + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledLayerCount ); + for ( size_t i = 0; i < instanceCreateInfo.enabledLayerCount; ++i ) + { + for ( const char * p = instanceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledExtensionCount ); + for ( size_t i = 0; i < instanceCreateInfo.enabledExtensionCount; ++i ) + { + for ( const char * p = instanceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerProperties const & layerProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.layerName[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.specVersion ); + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.implementationVersion ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, layerProperties.description[i] ); + } + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & macOSSurfaceCreateInfoMVK ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.sType ); + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.flags ); + VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pView ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & mappedMemoryRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.sType ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.memory ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.offset ); + VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & memoryAllocateFlagsInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & memoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.memoryTypeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrier const & memoryBarrier ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.dstAccessMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & memoryDedicatedAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.buffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & memoryDedicatedRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.prefersDedicatedAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.requiresDedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & memoryFdPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.memoryTypeBits ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & + memoryGetAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.memory ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & memoryGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
memoryGetFdInfoKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const & memoryGetRemoteAddressInfoNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & memoryGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const & + memoryGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryHeap const & memoryHeap ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.size ); + VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & memoryHostPointerPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.memoryTypeBits ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & + memoryOpaqueCaptureAddressAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.opaqueCaptureAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & memoryPriorityAllocateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.priority ); + 
return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements const & memoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.size ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.alignment ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.memoryTypeBits ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & memoryRequirements2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.memoryRequirements ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryType const & memoryType ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryType.propertyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryType.heapIndex ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & memoryWin32HandlePropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const & + memoryZirconHandlePropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & metalSurfaceCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pLayer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.firstIndex ); + VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.vertexOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const & multiDrawInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, 
multiDrawInfoEXT.vertexCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & multisamplePropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.maxSampleLocationGridSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const & + multiviewPerViewAttributesInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributesPositionXOnly ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const & mutableDescriptorTypeListVALVE ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListVALVE.descriptorTypeCount ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListVALVE.pDescriptorTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const & + mutableDescriptorTypeCreateInfoVALVE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.mutableDescriptorTypeListCount ); + VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoVALVE.pMutableDescriptorTypeLists ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.desiredPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.actualPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.earliestPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentMargin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & + performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.type ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & + performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType ); + VULKAN_HPP_HASH_COMBINE( 
seed, performanceCounterDescriptionKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.flags ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.category[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.description[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.unit ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.scope ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.storage ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.uuid[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.marker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.type ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.enable ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.parameter ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.counterPassIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & + performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.marker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & + physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevice16BitStorageFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.uniformAndStorageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storagePushConstant16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageInputOutput16 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & + physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4R4G4B4 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4B4G4R4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & + physicalDevice8BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.uniformAndStorageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storagePushConstant8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & + physicalDeviceASTCDecodeFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.decodeModeSharedExponent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const & + physicalDeviceAccelerationStructureFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureIndirectBuild ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureHostCommands ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceAccelerationStructureFeaturesKHR.descriptorBindingAccelerationStructureUpdateAfterBind ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const & + physicalDeviceAccelerationStructurePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceAccelerationStructurePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxGeometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxInstanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPrimitiveCount ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( + seed, + physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetUpdateAfterBindAccelerationStructures ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceAccelerationStructurePropertiesKHR.minAccelerationStructureScratchOffsetAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & + physicalDeviceBlendOperationAdvancedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.advancedBlendCoherentOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & + physicalDeviceBlendOperationAdvancedPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendMaxColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendIndependentBlend ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedSrcColor ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedDstColor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendCorrelatedOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendAllOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const & + physicalDeviceBorderColorSwizzleFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzleFromImage ); + return seed; + } + }; + + template <> 
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const &
+                              physicalDeviceBufferDeviceAddressFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressMultiDevice );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const &
+                              physicalDeviceBufferDeviceAddressFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressMultiDevice );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const &
+                              physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.deviceCoherentMemory );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const &
+                              physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.colorWriteEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const &
+                              physicalDeviceComputeShaderDerivativesFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupQuads );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupLinear );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const &
+                              physicalDeviceConditionalRenderingFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE(
+        seed, physicalDeviceConditionalRenderingFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.conditionalRendering );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.inheritedConditionalRendering );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const &
+                              physicalDeviceConservativeRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize );
+      VULKAN_HPP_HASH_COMBINE(
+        seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize );
+      VULKAN_HPP_HASH_COMBINE(
+        seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation );
+      VULKAN_HPP_HASH_COMBINE(
+        seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization );
+      VULKAN_HPP_HASH_COMBINE( seed,
+                               physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized );
+      VULKAN_HPP_HASH_COMBINE(
+        seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable );
+      VULKAN_HPP_HASH_COMBINE(
+        seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const &
+                              physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrix );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrixRobustBufferAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const &
+                              physicalDeviceCooperativeMatrixPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.cooperativeMatrixSupportedStages );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const &
+                              physicalDeviceCornerSampledImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed,
physicalDeviceCornerSampledImageFeaturesNV.cornerSampledImage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & + physicalDeviceCoverageReductionModeFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.coverageReductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const & + physicalDeviceCustomBorderColorFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColorWithoutFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const & + physicalDeviceCustomBorderColorPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.maxCustomBorderColorSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & + physicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.dedicatedAllocationImageAliasing ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const & + physicalDeviceDepthClipControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.depthClipControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & + physicalDeviceDepthClipEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.depthClipEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & + physicalDeviceDepthStencilResolveProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedDepthResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedStencilResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolveNone ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolve ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & + physicalDeviceDescriptorIndexingFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingPartiallyBound ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingFeatures.descriptorBindingVariableDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceDescriptorIndexingFeatures.runtimeDescriptorArray ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & + physicalDeviceDescriptorIndexingProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.robustBufferAccessUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.quadDivergentImplicitLod ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageUpdateAfterBindResources ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindInputAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & + physicalDeviceDeviceGeneratedCommandsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.deviceGeneratedCommands ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & + physicalDeviceDeviceGeneratedCommandsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxGraphicsShaderGroupCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenOffset ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamStride ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesCountBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesIndexBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minIndirectCommandsBufferOffsetAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const & + physicalDeviceDeviceMemoryReportFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.deviceMemoryReport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const & + physicalDeviceDiagnosticsConfigFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.diagnosticsConfig ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & + physicalDeviceDiscardRectanglePropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDiscardRectanglePropertiesEXT.maxDiscardRectangles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & physicalDeviceDriverProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverID ); + for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverName[i] ); + } + for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverInfo[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.conformanceVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const & physicalDeviceDrmPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasPrimary ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasRender ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMajor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMinor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMajor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMinor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const & + physicalDeviceDynamicRenderingFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.dynamicRendering ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & + physicalDeviceExclusiveScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.exclusiveScissor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & + physicalDeviceExtendedDynamicState2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2LogicOp ); + VULKAN_HPP_HASH_COMBINE( seed, + 
physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2PatchControlPoints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const & + physicalDeviceExtendedDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.extendedDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & + physicalDeviceExternalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & physicalDeviceExternalFenceInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & + physicalDeviceExternalImageFormatInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & + physicalDeviceExternalMemoryHostPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.minImportedHostPointerAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const & + physicalDeviceExternalMemoryRDMAFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.externalMemoryRDMA ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & + physicalDeviceExternalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.handleType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.features ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & + physicalDeviceFloatControlsProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.denormBehaviorIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.roundingModeIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat64 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const & + physicalDeviceFragmentDensityMap2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.fragmentDensityMapDeferred ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const & + 
physicalDeviceFragmentDensityMap2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledLoads ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledCoarseReconstructionEarlyAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxSubsampledArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentDensityMap2PropertiesEXT.maxDescriptorSetSubsampledSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & + physicalDeviceFragmentDensityMapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapNonSubsampledImages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & + physicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.fragmentDensityMapOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & + physicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.fragmentDensityOffsetGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & + physicalDeviceFragmentDensityMapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.minFragmentDensityTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.maxFragmentDensityTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.fragmentDensityInvocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & + physicalDeviceFragmentShaderBarycentricFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesNV.fragmentShaderBarycentric ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & + physicalDeviceFragmentShaderInterlockFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderSampleInterlock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderPixelInterlock ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderShadingRateInterlock ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & + physicalDeviceFragmentShadingRateEnumsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.fragmentShadingRateEnums ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.supersampleFragmentShadingRates ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentShadingRateEnumsFeaturesNV.noInvocationFragmentShadingRates ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & + physicalDeviceFragmentShadingRateEnumsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.maxFragmentShadingRateInvocationCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const & + physicalDeviceFragmentShadingRateFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pipelineFragmentShadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.primitiveFragmentShadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.attachmentFragmentShadingRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const & + physicalDeviceFragmentShadingRateKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.fragmentSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const & + physicalDeviceFragmentShadingRatePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.minFragmentShadingRateAttachmentTexelSize ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSize ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.primitiveFragmentShadingRateWithMultipleViewports ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.layeredShadingRateAttachments ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateNonTrivialCombinerOps ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSizeAspectRatio ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateCoverageSamples ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateRasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderDepthStencilWrites ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithSampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderSampleMask ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithConservativeRasterization ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithFragmentShaderInterlock ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithCustomSampleLocations ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateStrictMultiplyCombiner ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & + physicalDeviceGlobalPriorityQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceGlobalPriorityQueryFeaturesKHR.globalPriorityQuery ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & physicalDeviceGroupProperties ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDeviceCount ); + for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDevices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.subsetAllocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & + physicalDeviceHostQueryResetFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.hostQueryReset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & physicalDeviceIDProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceUUID[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.driverUUID[i] ); + } + for ( size_t i = 0; i < VK_LUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceNodeMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUIDValid ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & + physicalDeviceImageDrmFormatModifierInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pQueueFamilyIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & physicalDeviceImageFormatInfo2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.format ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.type ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceImageFormatInfo2.tiling ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.usage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const & + physicalDeviceImageRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.robustImageAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & + physicalDeviceImageViewImageFormatInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.imageViewType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const & + physicalDeviceImageViewMinLodFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.minLod ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & + physicalDeviceImagelessFramebufferFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & + physicalDeviceIndexTypeUint8FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.indexTypeUint8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const & + physicalDeviceInheritedViewportScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.inheritedViewportScissor2D ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const & + physicalDeviceInlineUniformBlockFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed 
= 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.inlineUniformBlock ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceInlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const & + physicalDeviceInlineUniformBlockProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxInlineUniformBlockSize ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const & + physicalDeviceInvocationMaskFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.invocationMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & physicalDeviceLimits ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension1D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension2D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension3D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimensionCube ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelBufferElements ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxUniformBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxStorageBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPushConstantsSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxMemoryAllocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAllocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.bufferImageGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sparseAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxBoundDescriptorSets ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageBuffers ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributeOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindingStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationGenerationLevel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationPatchSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerPatchOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlTotalOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryShaderInvocations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputVertices ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryTotalOutputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentInputComponents ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentOutputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentDualSrcAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentCombinedOutputResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeSharedMemorySize ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelPrecisionBits ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subTexelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.mipmapPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndexedIndexValue ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndirectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewports ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewportDimensions[i] ); + } + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportBoundsRange[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportSubPixelBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minMemoryMapAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minUniformBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minStorageBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelGatherOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelGatherOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minInterpolationOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxInterpolationOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelInterpolationOffsetBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferWidth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferHeight ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferColorSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferDepthSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferStencilSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferNoAttachmentsSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageColorSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageIntegerSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageDepthSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageStencilSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.storageImageSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSampleMaskWords ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampComputeAndGraphics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxClipDistances ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCullDistances ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCombinedClipAndCullDistances ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.discreteQueuePriorities ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeRange[i] ); + } + for ( size_t i = 0; i < 2; 
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthRange[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.strictLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.standardSampleLocations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyRowPitchAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.nonCoherentAtomSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const &
+                              physicalDeviceLineRasterizationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.rectangularLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.bresenhamLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.smoothLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledRectangularLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledBresenhamLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledSmoothLines );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const &
+                              physicalDeviceLineRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.lineSubPixelPrecisionBits );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const &
+                              physicalDeviceLinearColorAttachmentFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.linearColorAttachment );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const &
+                              physicalDeviceMaintenance3Properties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxPerSetDescriptors );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxMemoryAllocationSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const &
+                              physicalDeviceMaintenance4Features ) const
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.maintenance4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const & + physicalDeviceMaintenance4Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.maxBufferSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & + physicalDeviceMemoryBudgetPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapBudget[i] ); + } + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapUsage[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & + physicalDeviceMemoryPriorityFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.memoryPriority ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & physicalDeviceMemoryProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypeCount ); + for ( size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypes[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeapCount ); + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeaps[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & physicalDeviceMemoryProperties2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.memoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & + physicalDeviceMeshShaderFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.pNext 
); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.taskShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.meshShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & + physicalDeviceMeshShaderPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxDrawMeshTasksCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskTotalMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskOutputCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupInvocations ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshTotalMemorySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputVertices ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputPrimitives ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerVertexGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerPrimitiveGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const & + physicalDeviceMultiDrawFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.multiDraw ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const & + physicalDeviceMultiDrawPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.maxMultiDrawCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & physicalDeviceMultiviewFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiview ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewGeometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewTessellationShader 
); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & + physicalDeviceMultiviewPerViewAttributesPropertiesNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceMultiviewPerViewAttributesPropertiesNVX.perViewPositionAllComponents ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & + physicalDeviceMultiviewProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewInstanceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & + physicalDeviceMutableDescriptorTypeFeaturesVALVE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesVALVE.mutableDescriptorType ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & + physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDomain ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciBus ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDevice ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciFunction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & + physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pageableDeviceLocalMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & + physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterMultipleQueryPools ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & + physicalDevicePerformanceQueryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.allowCommandBufferQueryCopies ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const & + physicalDevicePipelineCreationCacheControlFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pipelineCreationCacheControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & + physicalDevicePipelineExecutablePropertiesFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pipelineExecutableInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & + physicalDevicePointClippingProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pointClippingBehavior ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const & + physicalDevicePortabilitySubsetFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.constantAlphaColorBlendFactors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.events ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatReinterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageView2DOn3DImage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.multisampleArrayImage ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePortabilitySubsetFeaturesKHR.mutableComparisonSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pointPolygons ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.samplerMipLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.separateStencilMaskRef ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDevicePortabilitySubsetFeaturesKHR.shaderSampleRateInterpolationFunctions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationIsolines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationPointMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.triangleFans ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.vertexAttributeAccessBeyondStride ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const & + physicalDevicePortabilitySubsetPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDevicePortabilitySubsetPropertiesKHR.minVertexInputBindingStrideAlignment ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & + physicalDevicePresentIdFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.presentId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const & + physicalDevicePresentWaitFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.presentWait ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & + physicalDevicePrimitiveTopologyListRestartFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyListRestart ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyPatchListRestart ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const & + physicalDevicePrivateDataFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePrivateDataFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.privateData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & + physicalDeviceProtectedMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.protectedMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & + physicalDeviceProtectedMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.protectedNoFault ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const & + 
physicalDeviceProvokingVertexFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.provokingVertexLast ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceProvokingVertexFeaturesEXT.transformFeedbackPreservesProvokingVertex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const & + physicalDeviceProvokingVertexPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.provokingVertexModePerPipeline ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceProvokingVertexPropertiesEXT.transformFeedbackPreservesTriangleFanProvokingVertex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & + physicalDevicePushDescriptorPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.maxPushDescriptors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & + physicalDeviceRGBA10X6FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.formatRgba10x6WithoutYCbCrSampler ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & + physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.rasterizationOrderColorAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.rasterizationOrderDepthAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesARM.rasterizationOrderStencilAttachmentAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const & + physicalDeviceRayQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceRayQueryFeaturesKHR.rayQuery ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const & + physicalDeviceRayTracingMotionBlurFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlur ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlurPipelineTraceRaysIndirect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const & + physicalDeviceRayTracingPipelineFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipeline ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineTraceRaysIndirect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTraversalPrimitiveCulling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const & + physicalDeviceRayTracingPipelinePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxShaderGroupStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupBaseAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleCaptureReplaySize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayDispatchInvocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayHitAttributeSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & + physicalDeviceRayTracingPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupHandleSize ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxShaderGroupStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupBaseAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxGeometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxInstanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxTriangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxDescriptorSetAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & + physicalDeviceRepresentativeFragmentTestFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.representativeFragmentTest ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const & + physicalDeviceRobustness2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustBufferAccess2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustImageAccess2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.nullDescriptor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const & + physicalDeviceRobustness2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustStorageBufferAccessSizeAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustUniformBufferAccessSizeAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & + physicalDeviceSampleLocationsPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSampleCounts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.maxSampleLocationGridSize ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationCoordinateRange[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSubPixelBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.variableSampleLocations 
); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & + physicalDeviceSamplerFilterMinmaxProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxSingleComponentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxImageComponentMapping ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & + physicalDeviceSamplerYcbcrConversionFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.samplerYcbcrConversion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & + physicalDeviceScalarBlockLayoutFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.scalarBlockLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & + physicalDeviceSeparateDepthStencilLayoutsFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.separateDepthStencilLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & + physicalDeviceShaderAtomicFloat2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat32AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat64AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat32AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat64AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderImageFloat32AtomicMinMax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sparseImageFloat32AtomicMinMax ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const & + physicalDeviceShaderAtomicFloatFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32AtomicAdd ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & + physicalDeviceShaderAtomicInt64Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderBufferInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderSharedInt64Atomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & + physicalDeviceShaderClockFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderSubgroupClock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderDeviceClock ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & + physicalDeviceShaderCoreProperties2AMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.shaderCoreFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.activeComputeUnitCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & + physicalDeviceShaderCorePropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderEngineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderArraysPerEngineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.computeUnitsPerShaderArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.simdPerComputeUnit ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minSgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxSgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprAllocationGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minVgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxVgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprAllocationGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & + physicalDeviceShaderDemoteToHelperInvocationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & + physicalDeviceShaderDrawParametersFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.shaderDrawParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & + physicalDeviceShaderFloat16Int8Features ) 
const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderInt8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & + physicalDeviceShaderImageAtomicInt64FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.shaderImageInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sparseImageInt64Atomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & + physicalDeviceShaderImageFootprintFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.imageFootprint ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const & + physicalDeviceShaderIntegerDotProductFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.shaderIntegerDotProduct ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const & + physicalDeviceShaderIntegerDotProductProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, 
physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties + .integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & + 
physicalDeviceShaderIntegerFunctions2FeaturesINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.shaderIntegerFunctions2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & + physicalDeviceShaderSMBuiltinsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.shaderSMBuiltins ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & + physicalDeviceShaderSMBuiltinsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderSMCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderWarpsPerSM ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & + physicalDeviceShaderSubgroupExtendedTypesFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & + physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.shaderSubgroupUniformControlFlow ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const & + physicalDeviceShaderTerminateInvocationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.shaderTerminateInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & + physicalDeviceShadingRateImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateImage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateCoarseSampleOrder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & + physicalDeviceShadingRateImagePropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRatePaletteSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateMaxCoarseSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & + physicalDeviceSparseImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.format ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.type ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.usage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.tiling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & + physicalDeviceSubgroupProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.subgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.quadOperationsInAllStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const & + physicalDeviceSubgroupSizeControlFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.subgroupSizeControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.computeFullSubgroups ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const & + physicalDeviceSubgroupSizeControlProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceSubgroupSizeControlProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.minSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxComputeWorkgroupSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.requiredSubgroupSizeStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const & + physicalDeviceSubpassShadingFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.subpassShading ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const & + physicalDeviceSubpassShadingPropertiesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceSubpassShadingPropertiesHUAWEI.maxSubpassShadingWorkgroupSizeAspectRatio ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & physicalDeviceSurfaceInfo2KHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.surface ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const & + physicalDeviceSynchronization2Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.synchronization2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & + physicalDeviceTexelBufferAlignmentFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.texelBufferAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const & + physicalDeviceTexelBufferAlignmentProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceTexelBufferAlignmentProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const & + physicalDeviceTextureCompressionASTCHDRFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.textureCompressionASTC_HDR ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & + physicalDeviceTimelineSemaphoreFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.timelineSemaphore ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & + physicalDeviceTimelineSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.maxTimelineSemaphoreValueDifference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const & physicalDeviceToolProperties ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.pNext ); + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.version[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.purposes ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.description[i] ); + } + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.layer[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & + physicalDeviceTransformFeedbackFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceTransformFeedbackFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.transformFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.geometryStreams ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & + physicalDeviceTransformFeedbackPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreams ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreamDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackQueries ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackStreamsLinesTriangles ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackRasterizationStreamSelect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackDraw ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & + physicalDeviceUniformBufferStandardLayoutFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.uniformBufferStandardLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & + physicalDeviceVariablePointersFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointersStorageBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & + physicalDeviceVertexAttributeDivisorFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + 
physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateZeroDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & + physicalDeviceVertexAttributeDivisorPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.maxVertexAttribDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & + physicalDeviceVertexInputDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.vertexInputDynamicState ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileKHR const & videoProfileKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.videoCodecOperation ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.chromaSubsampling ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.lumaBitDepth ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileKHR.chromaBitDepth ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfilesKHR const & videoProfilesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.profileCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfilesKHR.pProfiles ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const & + physicalDeviceVideoFormatInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.imageUsage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pVideoProfiles ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & physicalDeviceVulkan11Features ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan11Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.uniformAndStorageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storagePushConstant16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageInputOutput16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiview ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewGeometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewTessellationShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointersStorageBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.protectedMemory ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.samplerYcbcrConversion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.shaderDrawParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & + physicalDeviceVulkan11Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceUUID[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.driverUUID[i] ); + } + for ( size_t i = 0; i < VK_LUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceNodeMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUIDValid ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupQuadOperationsInAllStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pointClippingBehavior ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewViewCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewInstanceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.protectedNoFault ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxPerSetDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMemoryAllocationSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & physicalDeviceVulkan12Features ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerMirrorClampToEdge ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan12Features.drawIndirectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformAndStorageBuffer8BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storagePushConstant8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderBufferInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSharedInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInt8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSampledImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingSampledImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Features.descriptorBindingUniformTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Features.descriptorBindingStorageTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUpdateUnusedWhilePending ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingPartiallyBound ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingVariableDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.runtimeDescriptorArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerFilterMinmax ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.scalarBlockLayout ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.imagelessFramebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformBufferStandardLayout ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSubgroupExtendedTypes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.separateDepthStencilLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan12Features.hostQueryReset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.timelineSemaphore ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressMultiDevice ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelDeviceScope ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelAvailabilityVisibilityChains ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputViewportIndex ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputLayer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.subgroupBroadcastDynamicId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & + physicalDeviceVulkan12Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverID ); + for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverName[i] ); + } + for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverInfo[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.conformanceVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.denormBehaviorIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.roundingModeIndependence ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat64 ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceVulkan12Properties.maxUpdateAfterBindDescriptorsInAllPools ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.shaderUniformBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSampledImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.shaderStorageBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.shaderInputAttachmentArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.robustBufferAccessUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.quadDivergentImplicitLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageUpdateAfterBindResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedDepthResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedStencilResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolveNone ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolve ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxSingleComponentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxImageComponentMapping ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxTimelineSemaphoreValueDifference ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.framebufferIntegerColorSampleCounts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const & physicalDeviceVulkan13Features ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.robustImageAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.inlineUniformBlock ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Features.descriptorBindingInlineUniformBlockUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pipelineCreationCacheControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.privateData ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderDemoteToHelperInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderTerminateInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.subgroupSizeControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.computeFullSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.synchronization2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.textureCompressionASTC_HDR ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderZeroInitializeWorkgroupMemory ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.dynamicRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderIntegerDotProduct ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.maintenance4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const & + physicalDeviceVulkan13Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.minSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxComputeWorkgroupSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.requiredSubgroupSizeStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformBlockSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformTotalSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Properties.integerDotProduct16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Properties.integerDotProduct32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Properties.integerDotProduct64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkan13Properties + .integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetSingleTexelAlignment ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetSingleTexelAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxBufferSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & + physicalDeviceVulkanMemoryModelFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & + physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayoutScalarBlockLayout ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout8BitAccess ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout16BitAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & + physicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.ycbcr2plane444Formats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & + physicalDeviceYcbcrImageArraysFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.ycbcrImageArrays ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & + physicalDeviceZeroInitializeWorkgroupMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceZeroInitializeWorkgroupMemoryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( + seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.shaderZeroInitializeWorkgroupMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & pipelineCacheCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.initialDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pInitialData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & pipelineCacheHeaderVersionOne ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerVersion ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.deviceID ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.pipelineCacheUUID[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & + pipelineColorBlendAdvancedStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.srcPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.dstPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.blendOverlap ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const & pipelineColorWriteCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pColorWriteEnables ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & + pipelineCompilerControlCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.compilerControlFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & + pipelineCoverageModulationStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineCoverageModulationStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pCoverageModulationTable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & + pipelineCoverageReductionStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.coverageReductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & + pipelineCoverageToColorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorLocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.duration ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const & + pipelineCreationFeedbackCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineCreationFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pipelineStageCreationFeedbackCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineStageCreationFeedbacks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & + pipelineDiscardRectangleStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.flags ); + 
VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pDiscardRectangles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & pipelineExecutableInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.executableIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & + pipelineExecutableInternalRepresentationKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pNext ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.isText ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & pipelineExecutablePropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.stages ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.subgroupSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const & + pipelineFragmentShadingRateEnumStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRateType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRate ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.combinerOps[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const & + pipelineFragmentShadingRateStateCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.fragmentSize ); + for ( size_t i = 0; i < 2; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.combinerOps[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & pipelineLayoutCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pSetLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pPushConstantRanges ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & + pipelineRasterizationConservativeStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, + pipelineRasterizationConservativeStateCreateInfoEXT.conservativeRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, + pipelineRasterizationConservativeStateCreateInfoEXT.extraPrimitiveOverestimationSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & + pipelineRasterizationDepthClipStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.depthClipEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & + pipelineRasterizationLineStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.stippledLineEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStippleFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStipplePattern ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const & + pipelineRasterizationProvokingVertexStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.provokingVertexMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & + pipelineRasterizationStateRasterizationOrderAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.rasterizationOrder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & + pipelineRasterizationStateStreamCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.rasterizationStream ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const & pipelineRenderingCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( 
seed, pipelineRenderingCreateInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.stencilAttachmentFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & + pipelineRepresentativeFragmentTestStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, + pipelineRepresentativeFragmentTestStateCreateInfoNV.representativeFragmentTestEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & + pipelineSampleLocationsStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const & + pipelineShaderStageRequiredSubgroupSizeCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.requiredSubgroupSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & + pipelineTessellationDomainOriginStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.domainOrigin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const & + vertexInputBindingDivisorDescriptionEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionEXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionEXT.divisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & + pipelineVertexInputDivisorStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
pipelineVertexInputDivisorStateCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.vertexBindingDivisorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.pVertexBindingDivisors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & + pipelineViewportCoarseSampleOrderStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sampleOrderType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.customSampleOrderCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pCustomSampleOrders ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const & + pipelineViewportDepthClipControlCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.negativeOneToOne ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & + pipelineViewportExclusiveScissorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.exclusiveScissorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pExclusiveScissors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const & shadingRatePaletteNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.shadingRatePaletteEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.pShadingRatePaletteEntries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & + pipelineViewportShadingRateImageStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.shadingRateImageEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pShadingRatePalettes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const & 
viewportSwizzleNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.x ); + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.y ); + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.z ); + VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.w ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & + pipelineViewportSwizzleStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pViewportSwizzles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ViewportWScalingNV const & viewportWScalingNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.xcoeff ); + VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.ycoeff ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & + pipelineViewportWScalingStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportWScalingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pViewportWScalings ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_GGP ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & presentFrameTokenGGP ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.frameToken ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_GGP*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentIdKHR const & presentIdKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pPresentIds ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & presentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pWaitSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pSwapchains ); + VULKAN_HPP_HASH_COMBINE( 
seed, presentInfoKHR.pImageIndices ); + VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pResults ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RectLayerKHR const & rectLayerKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.offset ); + VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.extent ); + VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.layer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentRegionKHR const & presentRegionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.rectangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.pRectangles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & presentRegionsKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const & presentTimeGOOGLE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.desiredPresentTime ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & presentTimesInfoGOOGLE ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pTimes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & protectedSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.protectedSubmit ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryCount ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pipelineStatistics ); + return seed; + } + 
}; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & + queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.counterIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pCounterIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & + queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.performanceCountersSampling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const & + queueFamilyCheckpointProperties2NV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.checkpointExecutionStageMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & + queueFamilyCheckpointPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.checkpointExecutionStageMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const & + queueFamilyGlobalPriorityPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorityCount ); + for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorities[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueFlags ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueCount ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.timestampValidBits ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.minImageTransferGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & queueFamilyProperties2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.queueFamilyProperties ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR const & + queueFamilyQueryResultStatusProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusProperties2KHR.supported ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const & + rayTracingShaderGroupCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.type ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.generalShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.closestHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.anyHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.intersectionShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pShaderGroupCaptureReplayHandle ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const & + rayTracingPipelineInterfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayHitAttributeSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & rayTracingPipelineCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.groupCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pGroups ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.maxPipelineRayRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInfo ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInterface ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pDynamicState ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.layout ); + 
VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & + rayTracingShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.generalShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.closestHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.anyHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.intersectionShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rayTracingPipelineCreateInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.groupCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pGroups ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.maxRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.layout ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const & refreshCycleDurationGOOGLE ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, refreshCycleDurationGOOGLE.refreshDuration ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & renderPassAttachmentBeginInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & renderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.framebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderArea ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.clearValueCount ); + VULKAN_HPP_HASH_COMBINE( 
seed, renderPassBeginInfo.pClearValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassDescription const & subpassDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.flags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.inputAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pResolveAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pDepthStencilAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.preserveAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pPreserveAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassDependency const & subpassDependency ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dependencyFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & renderPassCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pSubpasses ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pDependencies ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & subpassDescription2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.inputAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pResolveAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pDepthStencilAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, 
subpassDescription2.preserveAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pPreserveAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & subpassDependency2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dependencyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.viewOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & renderPassCreateInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pSubpasses ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pDependencies ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.correlatedViewMaskCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pCorrelatedViewMasks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & + renderPassFragmentDensityMapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.fragmentDensityMapAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & + renderPassInputAttachmentAspectCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.aspectReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pAspectReferences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & renderPassMultiviewCreateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( 
seed, renderPassMultiviewCreateInfo.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewMasks ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.correlationMaskCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pCorrelationMasks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const & subpassSampleLocationsEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.subpassIndex ); + VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & + renderPassSampleLocationsBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.attachmentInitialSampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pAttachmentInitialSampleLocations ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.postSubpassSampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pPostSubpassSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const & + renderPassTransformBeginInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.transform ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const & + renderingFragmentDensityMapAttachmentInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const & + renderingFragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize ); + return seed; + } + }; + + template <> + struct 
hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInfo const & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.renderArea ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pDepthAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pStencilAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const & resolveImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const & + samplerBorderColorComponentMappingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.components ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.srgb ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.magFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipmapMode ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeU ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeV ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeW ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.anisotropyEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareOp ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.borderColor ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.unnormalizedCoordinates ); + return seed; + } + }; + + template <> + 
struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & samplerReductionModeCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.reductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & + samplerYcbcrConversionCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.components ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.xChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.yChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.chromaFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.forceExplicitReconstruction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & + samplerYcbcrConversionImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.combinedImageSamplerDescriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & samplerYcbcrConversionInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.conversion ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & screenSurfaceCreateInfoQNX ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.context ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & semaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.flags ); + return seed; + } + }; + + template <> + 
struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & semaphoreGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & semaphoreGetWin32HandleInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const & + semaphoreGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & semaphoreSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.value ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.value ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.stageMask ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.deviceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & semaphoreTypeCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.semaphoreType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.initialValue ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & semaphoreWaitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.semaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setStateFlagsIndirectCommandNV.data ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pCode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & + shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.validationCache ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const & shaderResourceUsageAMD ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedSgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsSizePerLocalWorkGroup ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsUsageSizeInBytes ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.scratchMemUsageInBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const & shaderStatisticsInfoAMD ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.shaderStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.resourceUsage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalSgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableSgprs ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.computeWorkGroupSize[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & + sharedPresentSurfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
sharedPresentSurfaceCapabilitiesKHR.sharedPresentSupportedUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const & sparseImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.imageGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & sparseImageFormatProperties2 ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & sparseImageMemoryRequirements ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.formatProperties ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailFirstLod ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailSize ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailStride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & sparseImageMemoryRequirements2 ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.memoryRequirements ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_GGP ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & + streamDescriptorSurfaceCreateInfoGGP ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.sType ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.flags ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.streamDescriptor ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_GGP*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.stride ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubmitInfo const & submitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
submitInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitDstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pCommandBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pSignalSemaphores ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubmitInfo2 const & submitInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.waitSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pWaitSemaphoreInfos ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.commandBufferInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pCommandBufferInfos ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.signalSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pSignalSemaphoreInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.contents ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & + subpassDescriptionDepthStencilResolve ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.depthResolveMode ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.stencilResolveMode ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pDepthStencilResolveAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassEndInfo const & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const & + subpassFragmentDensityMapOffsetEndInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.fragmentDensityOffsetCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.pFragmentDensityOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const & + subpassShadingPipelineCreateInfoHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
subpassShadingPipelineCreateInfoHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.subpass ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & surfaceCapabilities2EXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentTransform ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedCompositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedUsageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedSurfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & surfaceCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentTransform ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedCompositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & surfaceCapabilities2KHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.surfaceCapabilities ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & + surfaceCapabilitiesFullScreenExclusiveEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.fullScreenExclusiveSupported ); + return seed; + } + }; +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.format ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.colorSpace ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.surfaceFormat ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & + surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.fullScreenExclusive ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & + surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.hmonitor ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.supportsProtected ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.surfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.surface ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageFormat ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageColorSpace ); + VULKAN_HPP_HASH_COMBINE( seed, 
swapchainCreateInfoKHR.imageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageUsage ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageSharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.preTransform ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.compositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.presentMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.clipped ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.oldSwapchain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & + swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.localDimmingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & + textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.supportsTextureGatherLODBiasAMD ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.waitSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.signalSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pSignalSemaphoreValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.initialDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pInitialData ); + return 
seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.enabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pEnabledValidationFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.disabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pDisabledValidationFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.disabledValidationCheckCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pDisabledValidationChecks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & + vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.format ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & + vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.inputRate ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.divisor ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_VI_NN ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.flags ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_VI_NN*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & videoPictureResourceKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.codedOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.codedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceKHR.imageViewBinding ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR const & videoReferenceSlotKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.slotIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotKHR.pPictureResource ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.codecQualityPreset ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pReferenceSlots ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR const & videoBindMemoryKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memoryBindIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memoryOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoBindMemoryKHR.memorySize ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.capabilityFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.videoPictureExtentGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxReferencePicturesSlotsCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxReferencePicturesActiveCount ); + return seed; + } + }; +# endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.flags ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const & videoDecodeH264CapabilitiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.maxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.fieldOffsetGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.stdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const & videoDecodeH264DpbSlotInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pStdReferenceInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT const & videoDecodeH264MvcEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcEXT.pStdMvc ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT const & videoDecodeH264PictureInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.slicesCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pSlicesDataOffsets ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT const & videoDecodeH264ProfileEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileEXT.pictureLayout ); + return seed; + } + }; +# 
endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT const & + videoDecodeH264SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionCreateInfoEXT.pStdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const & + videoDecodeH264SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.spsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pSpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.ppsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pPpsStd ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const & + videoDecodeH264SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxSpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxPpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.pParametersAddInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const & videoDecodeH265CapabilitiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.maxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.stdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT const & videoDecodeH265DpbSlotInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.pStdReferenceInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if 
defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT const & videoDecodeH265PictureInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.slicesCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pSlicesDataOffsets ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT const & videoDecodeH265ProfileEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileEXT.stdProfileIdc ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT const & + videoDecodeH265SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionCreateInfoEXT.pStdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const & + videoDecodeH265SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.spsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pSpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.ppsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pPpsStd ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const & + videoDecodeH265SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxSpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxPpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.pParametersAddInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# 
if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.codedOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.codedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.dstPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pReferenceSlots ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const & videoEncodeH264CapabilitiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.inputModeFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.outputModeFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.minPictureSizeInMbs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxPictureSizeInMbs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.inputImageDataAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxNumL0ReferenceForP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxNumL0ReferenceForB ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxNumL1Reference ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.qualityLevelCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.stdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const & videoEncodeH264DpbSlotInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.slotIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pStdPictureInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT const & + videoEncodeH264EmitPictureParametersEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.pNext 
); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.spsId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.emitSpsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.ppsIdEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersEXT.ppsIdEntries ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & videoEncodeH264FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.frameBSize ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT const & videoEncodeH264NaluSliceEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pSliceHeaderStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.mbCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.refFinalList0EntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pRefFinalList0Entries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.refFinalList1EntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceEXT.pRefFinalList1Entries ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT const & videoEncodeH264ProfileEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileEXT.stdProfileIdc ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & videoEncodeH264QpEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpB ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const & + videoEncodeH264RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264RateControlInfoEXT.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.rateControlStructure ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.temporalLayerCount ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const & + videoEncodeH264RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.temporalLayerId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useInitialRcQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.initialRcQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.maxFrameSize ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT const & + videoEncodeH264SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.maxPictureSizeInMbs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoEXT.pStdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const & + videoEncodeH264SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.spsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pSpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.ppsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pPpsStd ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const & + videoEncodeH264SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.sType 
); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxSpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxPpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pParametersAddInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const & videoEncodeH264VclFrameInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.refDefaultFinalList0EntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pRefDefaultFinalList0Entries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.refDefaultFinalList1EntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pRefDefaultFinalList1Entries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.naluSliceEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNaluSliceEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pCurrentPictureInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const & videoEncodeH265CapabilitiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.inputModeFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.outputModeFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.ctbSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.inputImageDataAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumL0ReferenceForP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumL0ReferenceForB ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumL1Reference ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxNumSubLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.qualityLevelCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.stdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const & videoEncodeH265DpbSlotInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.slotIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.pStdReferenceInfo ); + return seed; + } + }; +# endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT const & + videoEncodeH265EmitPictureParametersEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.vpsId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.spsId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.emitVpsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.emitSpsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.ppsIdEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersEXT.ppsIdEntries ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & videoEncodeH265FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameBSize ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT const & + videoEncodeH265ReferenceListsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.referenceList0EntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pReferenceList0Entries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.referenceList1EntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pReferenceList1Entries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsEXT.pReferenceModifications ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceEXT const & videoEncodeH265NaluSliceEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceEXT.ctbCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceEXT.pReferenceFinalLists ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceEXT.pSliceHeaderStd ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT const & videoEncodeH265ProfileEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH265ProfileEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileEXT.stdProfileIdc ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & videoEncodeH265QpEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpB ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const & + videoEncodeH265RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.rateControlStructure ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.subLayerCount ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const & + videoEncodeH265RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.temporalId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useInitialRcQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.initialRcQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.maxFrameSize ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT const & + videoEncodeH265SessionCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoEXT.pStdExtensionVersion ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { 
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const & + videoEncodeH265SessionParametersAddInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.vpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pVpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.spsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pSpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.ppsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pPpsStd ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const & + videoEncodeH265SessionParametersCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxVpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxSpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxPpsStdCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pParametersAddInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const & videoEncodeH265VclFrameInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pReferenceFinalLists ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.naluSliceEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pNaluSliceEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pCurrentPictureInfo ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.qualityLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.codedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferMaxRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.srcPictureResource ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pReferenceSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.precedingExternallyEncodedBytes ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const & + videoEncodeRateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.averageBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.maxBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateNumerator ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateDenominator ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.virtualBufferSizeInMs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.initialVirtualBufferSizeInMs ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const & videoEncodeRateControlInfoKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.rateControlMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pLayerConfigs ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const & videoEndCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.flags ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const & videoFormatPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.format ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR const & videoGetMemoryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.memoryBindIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoGetMemoryPropertiesKHR.pMemoryRequirements ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const & videoQueueFamilyProperties2KHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoQueueFamilyProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoQueueFamilyProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoQueueFamilyProperties2KHR.videoCodecOperations ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & videoSessionCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pVideoProfile ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pictureFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePicturesFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxReferencePicturesSlotsCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxReferencePicturesActiveCount ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & + videoSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSessionParametersTemplate ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSession ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const & + videoSessionParametersUpdateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.updateSequenceCount ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & waylandSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
waylandSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.display ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.surface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & + win32KeyedMutexAcquireReleaseInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.acquireCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireKeys ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireTimeouts ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.releaseCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseKeys ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & + win32KeyedMutexAcquireReleaseInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.acquireCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireKeys ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireTimeoutMilliseconds ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.releaseCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseKeys ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & win32SurfaceCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hinstance ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hwnd ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstSet ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, 
writeDescriptorSet.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pImageInfo ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pBufferInfo ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pTexelBufferView ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const & + writeDescriptorSetAccelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & + writeDescriptorSetAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const & + writeDescriptorSetInlineUniformBlock ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pData ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & xcbSurfaceCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.connection ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & xlibSurfaceCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.dpy ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#endif // 14 <= VULKAN_HPP_CPP_VERSION + 
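(For reference only, not part of the patch: the std::hash specializations above are what allow Vulkan-Hpp structs to be hashed directly, e.g. for pipeline or surface-format caching. A minimal sketch of how a consumer might use them, assuming the default `vk` namespace alias for VULKAN_HPP_NAMESPACE, a C++14 or newer compiler to satisfy the guard above, and the Vulkan SDK headers on the include path:)

    // Illustrative usage sketch -- not part of vulkan_hash.hpp itself.
    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <vulkan/vulkan_hash.hpp>  // pulls in vulkan.hpp and the std::hash specializations

    int main()
    {
      // Hash a Vulkan-Hpp struct directly via the specialization generated above.
      vk::SurfaceFormatKHR format( vk::Format::eB8G8R8A8Unorm, vk::ColorSpaceKHR::eSrgbNonlinear );
      std::size_t h = std::hash<vk::SurfaceFormatKHR>{}( format );
      std::cout << "hash(SurfaceFormatKHR) = " << h << '\n';
      return 0;
    }

(Note that for structs carrying pointers, e.g. pNext or pQueueFamilyIndices, VULKAN_HPP_HASH_COMBINE folds in the pointer value itself rather than the pointed-to data, so two logically identical create-info structs with different pNext chains hash differently.)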
+} // namespace std +#endif // VULKAN_HASH_HPP diff --git a/Externals/Vulkan/Include/vulkan/vulkan_ios.h b/Externals/Vulkan/Include/vulkan/vulkan_ios.h index 72ef1a8a8250..57922054393b 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_ios.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_ios.h @@ -2,19 +2,9 @@ #define VULKAN_IOS_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -30,7 +20,7 @@ extern "C" { #define VK_MVK_ios_surface 1 -#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" typedef VkFlags VkIOSSurfaceCreateFlagsMVK; typedef struct VkIOSSurfaceCreateInfoMVK { diff --git a/Externals/Vulkan/Include/vulkan/vulkan_macos.h b/Externals/Vulkan/Include/vulkan/vulkan_macos.h index e6e5deaa3662..8e197c7cff41 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_macos.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_macos.h @@ -2,19 +2,9 @@ #define VULKAN_MACOS_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -30,7 +20,7 @@ extern "C" { #define VK_MVK_macos_surface 1 -#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; typedef struct VkMacOSSurfaceCreateInfoMVK { diff --git a/Externals/Vulkan/Include/vulkan/vulkan_metal.h b/Externals/Vulkan/Include/vulkan/vulkan_metal.h index 3dec68c7713d..3631f1200a54 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_metal.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_metal.h @@ -2,19 +2,9 @@ #define VULKAN_METAL_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/Externals/Vulkan/Include/vulkan/vulkan_raii.hpp b/Externals/Vulkan/Include/vulkan/vulkan_raii.hpp new file mode 100644 index 000000000000..a5304301d750 --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_raii.hpp @@ -0,0 +1,18669 @@ +// Copyright 2015-2022 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_RAII_HPP +#define VULKAN_RAII_HPP + +#include + +#if !defined( VULKAN_HPP_RAII_NAMESPACE ) +# define VULKAN_HPP_RAII_NAMESPACE raii +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + namespace VULKAN_HPP_RAII_NAMESPACE + { +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS ) + + template + VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue ) + { +# if ( 14 <= VULKAN_HPP_CPP_VERSION ) + return std::exchange( obj, std::forward( newValue ) ); +# else + T oldValue = std::move( obj ); + obj = std::forward( newValue ); + return oldValue; +# endif + } + + class ContextDispatcher : public DispatchLoaderBase + { + public: + ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) + : vkGetInstanceProcAddr( getProcAddr ) + //=== VK_VERSION_1_0 === + , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) + , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( + getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) + , vkEnumerateInstanceLayerProperties( + PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + //=== VK_VERSION_1_1 === + , vkEnumerateInstanceVersion( + PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + {} + + public: + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + + //=== VK_VERSION_1_0 === + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + }; + + class InstanceDispatcher : public DispatchLoaderBase + { + public: + InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) + : vkGetInstanceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = + PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = + PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = + PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( + vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( + vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = + PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumeratePhysicalDeviceGroups = + PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetPhysicalDeviceFeatures2 = + PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = + PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = + PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = + PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = + PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== 
VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = + PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = + PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = + PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_utils === + vkCreateDebugUtilsMessengerEXT = + PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = + PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = + PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = + PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = + PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = + PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_sample_locations === + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = + PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_GGP ) + //=== 
VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( + vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = + PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_device_group === + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( + vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = + PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = + PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = + PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = + PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + + //=== 
VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = + PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( + vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = + PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, 
"vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = + PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = + PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = + PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( 
VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = + PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +# endif /*VK_USE_PLATFORM_VI_NN*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = + PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = + PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 
vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + + //=== VK_EXT_acquire_drm_display === + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +# else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + + //=== VK_EXT_debug_report === + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + + //=== VK_EXT_debug_utils === + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + + //=== VK_EXT_direct_mode_display === + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +# else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_display_surface_counter === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +# else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_sample_locations === + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + + //=== VK_EXT_tooling_info === + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== 
VK_FUCHSIA_imagepipe_surface === + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +# else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +# else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +# else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_device_group === + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + + //=== VK_KHR_device_group_creation === + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + + //=== VK_KHR_external_fence_capabilities === + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + + //=== VK_KHR_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + + //=== VK_KHR_external_semaphore_capabilities === + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + + //=== VK_KHR_fragment_shading_rate === + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + + //=== VK_KHR_get_display_properties2 === + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + + //=== VK_KHR_get_physical_device_properties2 === + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + + //=== VK_KHR_get_surface_capabilities2 === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + + //=== VK_KHR_performance_query === + 
PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; +# else + PFN_dummy vkGetPhysicalDeviceVideoCapabilitiesKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceVideoFormatPropertiesKHR_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +# else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; +# endif /*VK_USE_PLATFORM_VI_NN*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== 
VK_NV_acquire_winrt_display === + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +# else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_NV_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + + //=== VK_NV_coverage_reduction_mode === + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + + //=== VK_NV_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +# else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + }; + + class DeviceDispatcher : public DispatchLoaderBase + { + public: + DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = + PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = + PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = + PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = + PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = + PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = + PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = 
PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = + PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = + PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = + PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = + PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = 
PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = + PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = + PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = + PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = + PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = + PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = + PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = + PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = + PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = + PFN_vkCmdSetStencilReference( 
vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = + PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = + PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = + PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = + PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + 
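The DeviceDispatcher constructed above applies one pattern to every device-level entry point: resolve it exactly once through vkGetDeviceProcAddr when the dispatcher is built, store the resulting function pointer, and, for entry points that were promoted from extensions (seen further down in this header), alias the core pointer to the extension pointer when the core one comes back null. The sketch below is a reduced illustration of that technique under stated assumptions, not the generated class: MiniDeviceTable is a made-up name, and only two real entry points are loaded.

#include <vulkan/vulkan.h>

// Hypothetical, trimmed-down version of the per-device dispatch table built by vulkan_raii.hpp.
struct MiniDeviceTable
{
  PFN_vkQueueSubmit             vkQueueSubmit             = nullptr;
  PFN_vkCmdDrawIndirectCount    vkCmdDrawIndirectCount    = nullptr;  // core since Vulkan 1.2
  PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = nullptr;  // promoted extension alias

  MiniDeviceTable( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device )
  {
    // Resolve each entry point once; later calls go through device-specific
    // pointers instead of the loader's per-call dispatch.
    vkQueueSubmit =
      reinterpret_cast<PFN_vkQueueSubmit>( getProcAddr( device, "vkQueueSubmit" ) );
    vkCmdDrawIndirectCount =
      reinterpret_cast<PFN_vkCmdDrawIndirectCount>( getProcAddr( device, "vkCmdDrawIndirectCount" ) );
    vkCmdDrawIndirectCountKHR =
      reinterpret_cast<PFN_vkCmdDrawIndirectCountKHR>( getProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );

    // Same fallback style as the generated dispatcher: if the core pointer is
    // missing (pre-1.2 device), alias it to the extension entry point so callers
    // can always use the core name.
    if ( !vkCmdDrawIndirectCount )
      vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
  }
};

In the header above the ordering is the same: the InstanceDispatcher resolves vkGetDeviceProcAddr last, and that pointer is what gets handed to the DeviceDispatcher constructor once a VkDevice exists.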
//=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = + PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = + PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = + PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( + vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = + PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = + PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = + PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = + PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = + PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = + PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = + PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = + PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = + PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = + PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( 
+ vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = + PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = + PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = + PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = + PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = + PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = + PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = + PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = + PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = + PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = + PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = + PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = + PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = + PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = + 
PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = + PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( + vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = + PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( + vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = + PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = + PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = + PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( + vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( + vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = + PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = + PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = + PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = + PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = + PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = + PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = + PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = + PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT 
= PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = + PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = + PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = + PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = + PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = + PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = + PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = + PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = + PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = + PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = + PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = + PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = + PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = + PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = + PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = + PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = + PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = + PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = + PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = + PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + 
vkCmdSetDepthWriteEnableEXT = + PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = + PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = + PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = + PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = + PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = + PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = + PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = + PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( + vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( + vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( + vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( + vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_line_rasterization === + 
vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = + PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = + PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = + PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = + PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = + PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( + vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = + PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = + PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = + PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = + PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = + PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = + PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = + PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = + PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = + PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = + PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( + vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + 
vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( + vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = + PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( + vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = + PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( + vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( + vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = + PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = + PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = + PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = + PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = + PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = + PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = + PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = + PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( + vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = + PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( + vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + 
vkGetPerformanceParameterINTEL = + PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = + PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = + PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( + vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( + vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = + PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = + PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = + PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( + vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = + PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = + 
PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = + PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = + PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = + PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = + PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = + PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( + vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = + PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = + PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( + vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = 
vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( + vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( + vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = + PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = + PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = + PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = + PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = + PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = + PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = + PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( + vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = + PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = + PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = + PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = + PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = + PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( + vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = + PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + 
vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = + PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = + PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( + vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = + PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = + PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = + PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( + vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = + PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = + PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = + PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + 
vkGetSwapchainImagesKHR = + PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = + PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = + PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + vkCmdWriteBufferMarker2AMD = + PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + vkGetQueueCheckpointData2NV = + PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = + PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = + PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = + PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( + vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = + PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = + PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + 
vkUpdateVideoSessionParametersKHR = + PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = + PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = + PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = + PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = + PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = + PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = + PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = + PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = + PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = + PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = + PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = + PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = + PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = + PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = + PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = + PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, 
"vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = + PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = + PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = + PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = + PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( + vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = + PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = + PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = + PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( + vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = + PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( + vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorNV = + PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = + PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( + vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = + PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + 
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + 
PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + + //=== VK_VERSION_1_1 === + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + + //=== VK_VERSION_1_2 === + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + 
PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + + //=== VK_VERSION_1_3 === + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_AMD_buffer_marker === + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + + //=== VK_AMD_display_native_hdr === + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +# else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_buffer_device_address === + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; + 
+      //=== VK_EXT_calibrated_timestamps ===
+      PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+
+      //=== VK_EXT_color_write_enable ===
+      PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
+
+      //=== VK_EXT_conditional_rendering ===
+      PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
+      PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
+
+      //=== VK_EXT_debug_marker ===
+      PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+      PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
+      PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+      PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+      PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+
+      //=== VK_EXT_debug_utils ===
+      PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+      PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+      PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+      PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+      PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+      PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+      PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+      PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+
+      //=== VK_EXT_discard_rectangles ===
+      PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
+
+      //=== VK_EXT_display_control ===
+      PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+      PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+      PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+      PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+
+      //=== VK_EXT_extended_dynamic_state ===
+      PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
+      PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+      PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
+      PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+      PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
+      PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
+      PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+      PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
+      PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+      PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+      PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
+      PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
+
+      //=== VK_EXT_extended_dynamic_state2 ===
+      PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
+      PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
+      PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
+      PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
+      PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
+
+      //=== VK_EXT_external_memory_host ===
+      PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_EXT_full_screen_exclusive ===
+      PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+      PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+      PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+# else
+      PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
+      PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
+      PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+      //=== VK_EXT_hdr_metadata ===
+      PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+
+      //=== VK_EXT_host_query_reset ===
+      PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+
+      //=== VK_EXT_image_drm_format_modifier ===
+      PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+
+      //=== VK_EXT_line_rasterization ===
+      PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+
+      //=== VK_EXT_multi_draw ===
+      PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
+      PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+
+      //=== VK_EXT_pageable_device_local_memory ===
+      PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
+
+      //=== VK_EXT_private_data ===
+      PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
+      PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
+      PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+      PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
+
+      //=== VK_EXT_sample_locations ===
+      PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+
+      //=== VK_EXT_transform_feedback ===
+      PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
+      PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
+      PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
+      PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
+      PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
+      PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
+
+      //=== VK_EXT_validation_cache ===
+      PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+      PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+      PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+      PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+
+      //=== VK_EXT_vertex_input_dynamic_state ===
+      PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+      //=== VK_FUCHSIA_buffer_collection ===
+      PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
+      PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
+      PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
+      PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
+      PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
+# else
+      PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
+      PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
+      PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
+      PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
+      PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+      //=== VK_FUCHSIA_external_memory ===
+      PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
+      PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
+# else
+      PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
+      PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+      //=== VK_FUCHSIA_external_semaphore ===
+      PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
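All of the function-pointer members in this table start out zero and are filled in once per device by the generated dispatcher, which queries each entry point from the driver. A minimal sketch of that pattern, assuming only <vulkan/vulkan.h>; MiniDispatcher and the two entry points chosen here are illustrative stand-ins, not the generated class:

    #include <vulkan/vulkan.h>

    // Illustrative stand-in for the generated dispatcher: every extension entry point
    // is resolved once with vkGetDeviceProcAddr and stays null when the corresponding
    // extension was not enabled on the device.
    struct MiniDispatcher
    {
      PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = nullptr;
      PFN_vkResetQueryPoolEXT         vkResetQueryPoolEXT         = nullptr;

      MiniDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device )
      {
        vkCmdSetColorWriteEnableEXT = reinterpret_cast<PFN_vkCmdSetColorWriteEnableEXT>(
          getProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
        vkResetQueryPoolEXT = reinterpret_cast<PFN_vkResetQueryPoolEXT>(
          getProcAddr( device, "vkResetQueryPoolEXT" ) );
      }
    };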
PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +# else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; + + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; + + //=== VK_INTEL_performance_query === + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_bind_memory2 === + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + + //=== VK_KHR_buffer_device_address === + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; + + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR 
vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + + //=== VK_KHR_deferred_host_operations === + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + + //=== VK_KHR_draw_indirect_count === + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; + + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +# else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +# else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + 
PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +# else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_fragment_shading_rate === + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + + //=== VK_KHR_maintenance3 === + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + + //=== VK_KHR_performance_query === + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + + //=== VK_KHR_present_wait === + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; + + //=== VK_KHR_push_descriptor === + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + + //=== VK_KHR_sampler_ycbcr_conversion === + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + 
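Since any entry in this table may legitimately remain null, callers are expected to test the pointer before dispatching through it. A short guarded-call sketch for the VK_KHR_synchronization2 entry points listed above, assuming the application links against the Vulkan loader; the helper name is made up for illustration:

    #include <vulkan/vulkan.h>

    // Record a pipeline barrier through the synchronization2 entry point only when it
    // was resolved; vkGetDeviceProcAddr returns null if the extension is not enabled.
    inline void cmdPipelineBarrier2IfAvailable( VkDevice device, VkCommandBuffer cmd,
                                                const VkDependencyInfoKHR & dependencyInfo )
    {
      auto pfn = reinterpret_cast<PFN_vkCmdPipelineBarrier2KHR>(
        vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
      if ( pfn )
        pfn( cmd, &dependencyInfo );
    }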
PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + + //=== VK_KHR_timeline_semaphore === + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; +# else + PFN_dummy vkCmdDecodeVideoKHR_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; +# else + PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; +# else + PFN_dummy vkCreateVideoSessionKHR_placeholder = 0; + PFN_dummy vkDestroyVideoSessionKHR_placeholder = 0; + PFN_dummy vkGetVideoSessionMemoryRequirementsKHR_placeholder = 0; + PFN_dummy vkBindVideoSessionMemoryKHR_placeholder = 0; + PFN_dummy vkCreateVideoSessionParametersKHR_placeholder = 0; + PFN_dummy vkUpdateVideoSessionParametersKHR_placeholder = 0; + PFN_dummy vkDestroyVideoSessionParametersKHR_placeholder = 0; + PFN_dummy vkCmdBeginVideoCodingKHR_placeholder = 0; + PFN_dummy vkCmdEndVideoCodingKHR_placeholder = 0; + PFN_dummy vkCmdControlVideoCodingKHR_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + + //=== VK_NVX_image_view_handle === + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + + //=== VK_NV_device_diagnostic_checkpoints === + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + +# if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + + //=== VK_NV_mesh_shader === + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + + //=== VK_NV_ray_tracing === + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + + //=== VK_NV_scissor_exclusive === + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + + //=== VK_NV_shading_rate_image === + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + }; + + //======================================== + //=== RAII HANDLE forward declarations === + //======================================== + + //=== VK_VERSION_1_0 === + class Instance; + class PhysicalDevice; + class Device; + class Queue; + class DeviceMemory; + class Fence; + class Semaphore; + class Event; + class QueryPool; + class Buffer; + class BufferView; + class Image; + class ImageView; + class ShaderModule; + class PipelineCache; + class Pipeline; + class PipelineLayout; + class Sampler; + class DescriptorPool; + class DescriptorSet; + class DescriptorSetLayout; + class Framebuffer; + class RenderPass; + class CommandPool; + class CommandBuffer; + + //=== VK_VERSION_1_1 === + class SamplerYcbcrConversion; + class DescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + class PrivateDataSlot; + + //=== VK_KHR_surface === + class SurfaceKHR; + + //=== VK_KHR_swapchain === + class SwapchainKHR; + + //=== VK_KHR_display === + class DisplayKHR; + class DisplayModeKHR; + + //=== VK_EXT_debug_report === + class DebugReportCallbackEXT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + class VideoSessionKHR; + class VideoSessionParametersKHR; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NVX_binary_import === + class CuModuleNVX; + class CuFunctionNVX; + + //=== VK_EXT_debug_utils === + class DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + class AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + class ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + class AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + class 
PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + class DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + class IndirectCommandsLayoutNV; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + class BufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //==================== + //=== RAII HANDLES === + //==================== + + class Context + { + public: +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + Context() + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( + m_dynamicLoader.getProcAddress( "vkGetInstanceProcAddr" ) ) ) +# else + Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) ) +# endif + {} + + ~Context() = default; + + Context( Context const & ) = delete; + Context( Context && rhs ) VULKAN_HPP_NOEXCEPT +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + : m_dynamicLoader( std::move( rhs.m_dynamicLoader ) ) + , m_dispatcher( rhs.m_dispatcher.release() ) +# else + : m_dispatcher( rhs.m_dispatcher.release() ) +# endif + {} + Context & operator=( Context const & ) = delete; + Context & operator =( Context && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + m_dynamicLoader = std::move( rhs.m_dynamicLoader ); +# endif + m_dispatcher.reset( rhs.m_dispatcher.release() ); + } + return *this; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance createInstance( + VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector enumerateInstanceExtensionProperties( + Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::vector enumerateInstanceLayerProperties() const; + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD uint32_t enumerateInstanceVersion() const; + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + private: +# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader; +# endif + std::unique_ptr m_dispatcher; + }; + + class Instance + { + public: + using CType = VkInstance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: + Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, + VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_allocator( static_cast( allocator ) ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + context.getDispatcher()->vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_instance ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateInstance" ); + } + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( + context.getDispatcher()->vkGetInstanceProcAddr, static_cast( 
m_instance ) ) ); + } + + Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, + VkInstance instance, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ) + , m_allocator( static_cast( allocator ) ) + { + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( + context.getDispatcher()->vkGetInstanceProcAddr, static_cast( m_instance ) ) ); + } + + Instance( std::nullptr_t ) {} + + ~Instance() + { + if ( m_instance ) + { + getDispatcher()->vkDestroyInstance( static_cast( m_instance ), + reinterpret_cast( m_allocator ) ); + } + } + + Instance() = delete; + Instance( Instance const & ) = delete; + Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( rhs.m_dispatcher.release() ) + {} + Instance & operator=( Instance const & ) = delete; + Instance & operator =( Instance && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_instance ) + { + getDispatcher()->vkDestroyInstance( static_cast( m_instance ), + reinterpret_cast( m_allocator ) ); + } + m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher.reset( rhs.m_dispatcher.release() ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Instance const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD std::vector enumeratePhysicalDevices() const; + + VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD std::vector + enumeratePhysicalDeviceGroups() const; + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createDisplayPlaneSurfaceKHR( + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createXlibSurfaceKHR( + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createXcbSurfaceKHR( + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createWaylandSurfaceKHR( + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createAndroidSurfaceKHR( + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createWin32SurfaceKHR( + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT createDebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createStreamDescriptorSurfaceGGP( + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createViSurfaceNN( + VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + VULKAN_HPP_NODISCARD std::vector + enumeratePhysicalDeviceGroupsKHR() const; + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createIOSSurfaceMVK( + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createMacOSSurfaceMVK( + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT createDebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void submitDebugUtilsMessageEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createImagePipeSurfaceFUCHSIA( + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( 
VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createMetalSurfaceEXT( + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createHeadlessSurfaceEXT( + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createDirectFBSurfaceEXT( + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR createScreenSurfaceQNX( + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + std::unique_ptr m_dispatcher; + }; + + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkPhysicalDevice physicalDevice ) + : m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() ) + {} + + PhysicalDevice( std::nullptr_t ) {} + + PhysicalDevice() = delete; + PhysicalDevice( PhysicalDevice const & ) = delete; + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + PhysicalDevice & operator=( PhysicalDevice const & ) = delete; + PhysicalDevice & operator =( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PhysicalDevice const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties + getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::ImageFormatProperties getImageFormatProperties( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device createDevice( + VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector enumerateDeviceExtensionProperties( + Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::vector enumerateDeviceLayerProperties() const; + + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getFeatures2() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getProperties2() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + template + VULKAN_HPP_NODISCARD StructureChain + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD std::vector getToolProperties() const; + + //=== VK_KHR_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + VULKAN_HPP_NODISCARD std::vector + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::vector getSurfacePresentModesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD std::vector + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD std::vector getDisplayPropertiesKHR() const; + + VULKAN_HPP_NODISCARD std::vector + getDisplayPlanePropertiesKHR() const; + + VULKAN_HPP_NODISCARD std::vector + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR( + uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR( + uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const; + + template + VULKAN_HPP_NODISCARD StructureChain + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const; + + VULKAN_HPP_NODISCARD std::vector getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_get_physical_device_properties2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + template + VULKAN_HPP_NODISCARD StructureChain getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_external_semaphore_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const + VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy, + RROutput rrOutput ) const; +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + //=== VK_KHR_external_fence_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_performance_query === + + VULKAN_HPP_NODISCARD std::pair, std::vector> + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const; + + VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( + const 
VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_get_surface_capabilities2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + template + VULKAN_HPP_NODISCARD StructureChain + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + //=== VK_KHR_get_display_properties2 === + + VULKAN_HPP_NODISCARD std::vector getDisplayProperties2KHR() const; + + VULKAN_HPP_NODISCARD std::vector + getDisplayPlaneProperties2KHR() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const; + + //=== VK_EXT_sample_locations === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsEXT() const; + + //=== VK_KHR_fragment_shading_rate === + + VULKAN_HPP_NODISCARD std::vector + getFragmentShadingRatesKHR() const; + + //=== VK_EXT_tooling_info === + + VULKAN_HPP_NODISCARD std::vector getToolPropertiesEXT() const; + + //=== VK_NV_cooperative_matrix === + + VULKAN_HPP_NODISCARD std::vector + getCooperativeMatrixPropertiesNV() const; + + //=== VK_NV_coverage_reduction_mode === + + VULKAN_HPP_NODISCARD std::vector + getSupportedFramebufferMixedSamplesCombinationsNV() const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VULKAN_HPP_NODISCARD std::vector + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_acquire_drm_display === + + void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getWinrtDisplayNV( uint32_t deviceRelativeId ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + private: + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class PhysicalDevices : public std::vector + { + public: + PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & 
instance ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = + instance.getDispatcher(); + std::vector physicalDevices; + uint32_t physicalDeviceCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( dispatcher->vkEnumeratePhysicalDevices( + static_cast( *instance ), &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( dispatcher->vkEnumeratePhysicalDevices( + static_cast( *instance ), &physicalDeviceCount, physicalDevices.data() ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + this->reserve( physicalDeviceCount ); + for ( auto const & physicalDevice : physicalDevices ) + { + this->emplace_back( instance, physicalDevice ); + } + } + else + { + throwResultException( result, "vkEnumeratePhysicalDevices" ); + } + } + + PhysicalDevices() = delete; + PhysicalDevices( PhysicalDevices const & ) = delete; + PhysicalDevices( PhysicalDevices && rhs ) = default; + PhysicalDevices & operator=( PhysicalDevices const & ) = delete; + PhysicalDevices & operator=( PhysicalDevices && rhs ) = default; + }; + + class Device + { + public: + using CType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, + VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_allocator( static_cast( allocator ) ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( physicalDevice.getDispatcher()->vkCreateDevice( + static_cast( *physicalDevice ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_device ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDevice" ); + } + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( + physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast( m_device ) ) ); + } + + Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, + VkDevice device, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ), m_allocator( static_cast( allocator ) ) + { + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( + physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast( m_device ) ) ); + } + + Device( std::nullptr_t ) {} + + ~Device() + { + if ( m_device ) + { + getDispatcher()->vkDestroyDevice( static_cast( m_device ), + reinterpret_cast( m_allocator ) ); + } + } + + Device() = delete; + Device( Device const & ) = delete; + Device( Device && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( rhs.m_dispatcher.release() ) + {} + Device & operator=( Device const & ) = delete; 
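Taken together, Context, Instance, PhysicalDevice(s) and Device replace the usual create/destroy call pairs with constructor/destructor pairs. A minimal bring-up sketch, assuming this header is included as <vulkan/vulkan_raii.hpp>, the default vk / vk::raii namespaces, and VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL; queue family 0 and the absence of error handling are simplifications:

    #include <utility>
    #include <vulkan/vulkan_raii.hpp>

    int main()
    {
      // Context loads vkGetInstanceProcAddr through the dynamic loader (see its constructor above).
      vk::raii::Context      context;
      vk::ApplicationInfo    appInfo( "raii-demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
      vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
      vk::raii::Instance     instance( context, instanceCreateInfo );

      // PhysicalDevices is a std::vector of move-only PhysicalDevice wrappers.
      vk::raii::PhysicalDevices physicalDevices( instance );
      vk::raii::PhysicalDevice  physicalDevice = std::move( physicalDevices.front() );

      // One queue from family 0 is enough for this sketch.
      float                     queuePriority = 1.0f;
      vk::DeviceQueueCreateInfo queueCreateInfo( {}, 0, 1, &queuePriority );
      vk::DeviceCreateInfo      deviceCreateInfo( {}, queueCreateInfo );
      vk::raii::Device          device( physicalDevice, deviceCreateInfo );

      // Everything created above is destroyed in reverse order when main() returns.
      return 0;
    }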
+ Device & operator =( Device && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_device ) + { + getDispatcher()->vkDestroyDevice( static_cast( m_device ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher.reset( rhs.m_dispatcher.release() ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Device const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex ) const; + + void waitIdle() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory allocateMemory( + VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void + flushMappedMemoryRanges( ArrayProxy const & memoryRanges ) const; + + void invalidateMappedMemoryRanges( + ArrayProxy const & memoryRanges ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence createFence( + VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void resetFences( ArrayProxy const & fences ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore createSemaphore( + VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event createEvent( + VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool createQueryPool( + VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer createBuffer( + VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView createBufferView( + VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image createImage( + VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView createImageView( + VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule createShaderModule( + VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_RAII_NAMESPACE::PipelineCache createPipelineCache( + VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector createComputePipelines( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createComputePipeline( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout createPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler createSampler( + VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout createDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool createDescriptorPool( + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; + + void updateDescriptorSets( + ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer createFramebuffer( + VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass( + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool createCommandPool( + VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; + + //=== VK_VERSION_1_1 === + + void bindBufferMemory2( ArrayProxy const & bindInfos ) const; + + void bindImageMemory2( ArrayProxy const & bindInfos ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( + uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue + getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion createSamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate createDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupport( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass2( + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; + + void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t getMemoryOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot createPrivateDataSlot( + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const; + + VULKAN_HPP_NODISCARD uint64_t + getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR createSwapchainKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR + getGroupPresentCapabilitiesKHR() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + VULKAN_HPP_NODISCARD std::pair + acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const; + + //=== VK_KHR_display_swapchain === + + VULKAN_HPP_NODISCARD std::vector createSharedSwapchainsKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR createSharedSwapchainKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_EXT_debug_marker === + + void debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const; + + void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR createVideoSessionKHR( + VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR createVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NVX_binary_import === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX createCuModuleNVX( + VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX createCuFunctionNVX( + VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_NVX_image_view_handle === + + VULKAN_HPP_NODISCARD uint32_t + getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( + uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VULKAN_HPP_NODISCARD HANDLE + getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR + getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VULKAN_HPP_NODISCARD int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + void importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const; + + VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + void importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const; + + VULKAN_HPP_NODISCARD int getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const; + + //=== VK_KHR_descriptor_update_template === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate createDescriptorUpdateTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void destroyDescriptorUpdateTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_display_control === + + void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence registerEventEXT( + VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence registerDisplayEventEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_EXT_hdr_metadata === + + void setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + //=== VK_KHR_create_renderpass2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass createRenderPass2KHR( + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + void importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & 
importFenceWin32HandleInfo ) const; + + VULKAN_HPP_NODISCARD HANDLE + getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + void importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const; + + VULKAN_HPP_NODISCARD int getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const; + + //=== VK_KHR_performance_query === + + void acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const; + + void releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + void setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const; + + void setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + + template + VULKAN_HPP_NODISCARD StructureChain + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + + VULKAN_HPP_NODISCARD struct AHardwareBuffer * getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_get_memory_requirements2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR createAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & + pBuildRangeInfos ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const; + + 
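+      // Usage sketch (illustrative comment only, not part of the generated API): the create*
+      // factory functions declared above return RAII wrapper objects that destroy their Vulkan
+      // handle automatically. Assuming an already-constructed RAII `device` and a filled-in
+      // AccelerationStructureCreateInfoKHR `createInfo` (hypothetical names):
+      //
+      //   VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR accel =
+      //     device.createAccelerationStructureKHR( createInfo );
+      //   // vkDestroyAccelerationStructureKHR runs automatically when `accel` goes out of scope.
+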
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const; + + template + VULKAN_HPP_NODISCARD std::vector writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const; + + template + VULKAN_HPP_NODISCARD DataType writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_sampler_ycbcr_conversion === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion createSamplerYcbcrConversionKHR( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void destroySamplerYcbcrConversionKHR( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_bind_memory2 === + + void bindBufferMemory2KHR( ArrayProxy const & bindInfos ) const; + + void bindImageMemory2KHR( ArrayProxy const & bindInfos ) const; + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT createValidationCacheEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_NV_ray_tracing === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV createAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + void bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos ) const; + + VULKAN_HPP_NODISCARD std::vector createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineNV( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_KHR_maintenance3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getDescriptorSetLayoutSupportKHR( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_external_memory_host === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer ) const; + + //=== VK_EXT_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::pair, uint64_t> getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos ) const; + + VULKAN_HPP_NODISCARD std::pair + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const; + + //=== VK_KHR_timeline_semaphore === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; + + void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const; + + //=== VK_INTEL_performance_query === + + void initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const; + + void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL + acquirePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const; + + //=== VK_EXT_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR getGroupSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t getMemoryOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_deferred_host_operations === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR createDeferredOperationKHR( + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_KHR_pipeline_executable_properties === + + VULKAN_HPP_NODISCARD 
std::vector + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutableStatisticsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV createIndirectCommandsLayoutNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_EXT_private_data === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot createPrivateDataSlotEXT( + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const; + + VULKAN_HPP_NODISCARD uint64_t + getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_ray_tracing_pipeline === + + VULKAN_HPP_NODISCARD std::vector createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional< + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional< + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VULKAN_HPP_NODISCARD zx_handle_t getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + void importSemaphoreZirconHandleFUCHSIA( + const 
VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const; + + VULKAN_HPP_NODISCARD zx_handle_t getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA createBufferCollectionFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_NV_external_memory_rdma === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV getMemoryRemoteAddressNV( + const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const; + + //=== VK_KHR_maintenance4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getBufferMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD StructureChain getImageMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + std::unique_ptr m_dispatcher; + }; + + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: + AccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateAccelerationStructureKHR( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateAccelerationStructureKHR" ); + } + } + + AccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkAccelerationStructureKHR accelerationStructure, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_accelerationStructure( accelerationStructure ) + , m_allocator( static_cast( allocator ) ) 
+      , m_dispatcher( device.getDispatcher() )
+    {}
+
+    AccelerationStructureKHR( std::nullptr_t ) {}
+
+    ~AccelerationStructureKHR()
+    {
+      if ( m_accelerationStructure )
+      {
+        getDispatcher()->vkDestroyAccelerationStructureKHR(
+          static_cast<VkDevice>( m_device ),
+          static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
+          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+    }
+
+    AccelerationStructureKHR() = delete;
+    AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete;
+    AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+      , m_accelerationStructure(
+          VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
+      , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+      , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete;
+    AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        if ( m_accelerationStructure )
+        {
+          getDispatcher()->vkDestroyAccelerationStructureKHR(
+            static_cast<VkDevice>( m_device ),
+            static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
+            reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_accelerationStructure =
+          VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructure;
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class AccelerationStructureNV
+  {
+  public:
+    using CType = VkAccelerationStructureNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
+
+  public:
+    AccelerationStructureNV(
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+      VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device )
+      , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      , m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result =
+        static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateAccelerationStructureNV(
+          static_cast<VkDevice>( *device ),
+          reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
+          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+          reinterpret_cast<VkAccelerationStructureNV *>( &m_accelerationStructure ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateAccelerationStructureNV" );
+      }
+    }
+
+    AccelerationStructureNV(
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+      VkAccelerationStructureNV accelerationStructure,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device )
+      , m_accelerationStructure( accelerationStructure )
+      , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      , m_dispatcher( device.getDispatcher() )
+    {}
+
+    AccelerationStructureNV( std::nullptr_t ) {}
+
+    ~AccelerationStructureNV()
+    {
+      if ( m_accelerationStructure )
+      {
+        getDispatcher()->vkDestroyAccelerationStructureNV(
+          static_cast<VkDevice>( m_device ),
+          static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+    }
+
+    AccelerationStructureNV() = delete;
+    AccelerationStructureNV( AccelerationStructureNV const & ) = delete;
+    AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+      , m_accelerationStructure(
+          VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
+      , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+      , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete;
+    AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        if ( m_accelerationStructure )
+        {
+          getDispatcher()->vkDestroyAccelerationStructureNV(
+            static_cast<VkDevice>( m_device ),
+            static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+            reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_accelerationStructure =
+          VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructure;
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    //=== VK_NV_ray_tracing ===
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD std::vector<DataType> getHandle( size_t dataSize ) const;
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD DataType getHandle() const;
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class Buffer
+  {
+  public:
+    using CType = VkBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
+
+  public:
+    Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+            VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
+            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device )
+      , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      , m_dispatcher( device.getDispatcher() )
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        device.getDispatcher()->vkCreateBuffer( static_cast<VkDevice>( *device ),
+                                                reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+                                                reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+                                                reinterpret_cast<VkBuffer *>( &m_buffer ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, "vkCreateBuffer" );
+      }
+    }
+
+    Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+            VkBuffer buffer,
+            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      : m_device( *device )
+      , m_buffer( buffer )
+      , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      , m_dispatcher( device.getDispatcher() )
+    {}
+
+    Buffer( std::nullptr_t ) {}
+
+    ~Buffer()
+    {
+      if ( m_buffer )
+      {
+        getDispatcher()->vkDestroyBuffer( static_cast<VkDevice>( m_device ),
+                                          static_cast<VkBuffer>( m_buffer ),
+                                          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+      }
+    }
+
+    Buffer() = delete;
+    Buffer( Buffer const & ) = delete;
+    Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+      , m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) )
+      , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+      , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    Buffer & operator=( Buffer const & ) = delete;
+    Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        if ( m_buffer )
+        {
+          getDispatcher()->vkDestroyBuffer( static_cast<VkDevice>( m_device ),
+                                            static_cast<VkBuffer>( m_buffer ),
+                                            reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_buffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} );
+        m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::Buffer const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    //=== VK_VERSION_1_0 ===
+
+    void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::Buffer m_buffer = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  class BufferCollectionFUCHSIA
+  {
+  public:
+    using CType = VkBufferCollectionFUCHSIA;
+
+    static
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + + public: + BufferCollectionFUCHSIA( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_collection ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateBufferCollectionFUCHSIA" ); + } + } + + BufferCollectionFUCHSIA( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_collection( collection ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + BufferCollectionFUCHSIA( std::nullptr_t ) {} + + ~BufferCollectionFUCHSIA() + { + if ( m_collection ) + { + getDispatcher()->vkDestroyBufferCollectionFUCHSIA( + static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( m_allocator ) ); + } + } + + BufferCollectionFUCHSIA() = delete; + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete; + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_collection ) + { + getDispatcher()->vkDestroyBufferCollectionFUCHSIA( + static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_collection = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_collection; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_FUCHSIA_buffer_collection === + + void 
setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const; + + void + setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + class BufferView + { + public: + using CType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateBufferView( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_bufferView ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateBufferView" ); + } + } + + BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBufferView bufferView, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_bufferView( bufferView ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + BufferView( std::nullptr_t ) {} + + ~BufferView() + { + if ( m_bufferView ) + { + getDispatcher()->vkDestroyBufferView( static_cast( m_device ), + static_cast( m_bufferView ), + reinterpret_cast( m_allocator ) ); + } + } + + BufferView() = delete; + BufferView( BufferView const & ) = delete; + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + BufferView & operator=( BufferView const & ) = delete; + BufferView & operator =( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_bufferView ) + { + getDispatcher()->vkDestroyBufferView( static_cast( m_device ), + static_cast( m_bufferView ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_bufferView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; 
+ } + + VULKAN_HPP_NAMESPACE::BufferView const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CommandPool + { + public: + using CType = VkCommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCommandPool( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_commandPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateCommandPool" ); + } + } + + CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCommandPool commandPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_commandPool( commandPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + CommandPool( std::nullptr_t ) {} + + ~CommandPool() + { + if ( m_commandPool ) + { + getDispatcher()->vkDestroyCommandPool( static_cast( m_device ), + static_cast( m_commandPool ), + reinterpret_cast( m_allocator ) ); + } + } + + CommandPool() = delete; + CommandPool( CommandPool const & ) = delete; + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + CommandPool & operator=( CommandPool const & ) = delete; + CommandPool & operator =( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_commandPool ) + { + getDispatcher()->vkDestroyCommandPool( static_cast( m_device ), + static_cast( m_commandPool ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = 
          VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::CommandPool const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
+    VULKAN_HPP_NAMESPACE::Device getDevice() const
+    {
+      return m_device;
+    }
+
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+    {
+      VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+      return m_dispatcher;
+    }
+
+    //=== VK_VERSION_1_0 ===
+
+    void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+    //=== VK_VERSION_1_1 ===
+
+    void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
+      VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_maintenance1 ===
+
+    void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const
+      VULKAN_HPP_NOEXCEPT;
+
+  private:
+    VULKAN_HPP_NAMESPACE::Device m_device = {};
+    VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+  };
+
+  class CommandBuffer
+  {
+  public:
+    using CType = VkCommandBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+      VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
+
+  public:
+    CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                   VkCommandBuffer commandBuffer,
+                   VkCommandPool commandPool )
+      : m_device( *device )
+      , m_commandPool( commandPool )
+      , m_commandBuffer( commandBuffer )
+      , m_dispatcher( device.getDispatcher() )
+    {}
+
+    CommandBuffer( std::nullptr_t ) {}
+
+    ~CommandBuffer()
+    {
+      if ( m_commandBuffer )
+      {
+        getDispatcher()->vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
+                                               static_cast<VkCommandPool>( m_commandPool ),
+                                               1,
+                                               reinterpret_cast<const VkCommandBuffer *>( &m_commandBuffer ) );
+      }
+    }
+
+    CommandBuffer() = delete;
+    CommandBuffer( CommandBuffer const & ) = delete;
+    CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
+      : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+      , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
+      , m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) )
+      , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+    {}
+    CommandBuffer & operator=( CommandBuffer const & ) = delete;
+    CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( this != &rhs )
+      {
+        if ( m_commandBuffer )
+        {
+          getDispatcher()->vkFreeCommandBuffers( static_cast<VkDevice>( m_device ),
+                                                 static_cast<VkCommandPool>( m_commandPool ),
+                                                 1,
+                                                 reinterpret_cast<const VkCommandBuffer *>( &m_commandBuffer ) );
+        }
+        m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+        m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} );
+        m_commandBuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} );
+        m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+      }
+      return *this;
+ } + + VULKAN_HPP_NAMESPACE::CommandBuffer const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const; + + void end() const; + + void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + + void setViewport( uint32_t firstViewport, + ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT; + + void setScissor( uint32_t firstScissor, + ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT; + + void setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT; + + void setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT; + + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT; + + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT; + + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference ) const VULKAN_HPP_NOEXCEPT; + + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + ArrayProxy const & descriptorSets, + ArrayProxy const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT; + + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + void bindVertexBuffers( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT; + + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT; + + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + 
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT; + + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions ) const + VULKAN_HPP_NOEXCEPT; + + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions ) const + VULKAN_HPP_NOEXCEPT; + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT; + + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT; + + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + ArrayProxy const & ranges ) const + VULKAN_HPP_NOEXCEPT; + + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges ) const + VULKAN_HPP_NOEXCEPT; + + void + clearAttachments( ArrayProxy const & attachments, + ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT; + + void + resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void waitEvents( ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers ) const + VULKAN_HPP_NOEXCEPT; + + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers ) + const VULKAN_HPP_NOEXCEPT; + + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + 
uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT; + + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT; + + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT; + + void endRenderPass() const VULKAN_HPP_NOEXCEPT; + + void executeCommands( ArrayProxy const & commandBuffers ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_1 === + + void setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT; + + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void + beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT; + + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void waitEvents2( ArrayProxy const & events, + ArrayProxy const & dependencyInfos ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 
& copyImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const + VULKAN_HPP_NOEXCEPT; + + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const + VULKAN_HPP_NOEXCEPT; + + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRendering() const VULKAN_HPP_NOEXCEPT; + + void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT; + + void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT; + + void setViewportWithCount( ArrayProxy const & viewports ) const + VULKAN_HPP_NOEXCEPT; + + void setScissorWithCount( ArrayProxy const & scissors ) const + VULKAN_HPP_NOEXCEPT; + + void bindVertexBuffers2( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + ArrayProxy const & strides + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT; + + void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT; + + void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_marker === + + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const + VULKAN_HPP_NOEXCEPT; + + void debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT; + + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const + VULKAN_HPP_NOEXCEPT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const + VULKAN_HPP_NOEXCEPT; + + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const + VULKAN_HPP_NOEXCEPT; + + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const + VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if 
defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT; + + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index ) const VULKAN_HPP_NOEXCEPT; + + void drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NVX_binary_import === + + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_draw_indirect_count === + + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_dynamic_rendering === + + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRenderingKHR() const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + void setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT; + + void dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_push_descriptor === + + void pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT; + + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_conditional_rendering === + + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & + conditionalRenderingBegin ) const 
VULKAN_HPP_NOEXCEPT; + + void endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_clip_space_w_scaling === + + void setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy const & viewportWScalings ) + const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_discard_rectangles === + + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_create_renderpass2 === + + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const + VULKAN_HPP_NOEXCEPT; + + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_sample_locations === + + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_acceleration_structure === + + void buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & + pBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const + VULKAN_HPP_NOEXCEPT; + + void copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + void writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_shading_rate_image === + + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT; + + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + ArrayProxy const & + shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT; + + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & + customSampleOrders ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_ray_tracing === + + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + 
VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT; + + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const + VULKAN_HPP_NOEXCEPT; + + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT; + + void writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_draw_indirect_count === + + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_buffer_marker === + + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_mesh_shader === + + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT; + + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_scissor_exclusive === + + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy const & exclusiveScissors ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_INTEL_performance_query === + + void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const; + + void setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const; + + void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const; + + //=== 
VK_KHR_fragment_shading_rate === + + void setFragmentShadingRateKHR( + const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_line_rasterization === + + void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state === + + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT; + + void + setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT; + + void setViewportWithCountEXT( ArrayProxy const & viewports ) const + VULKAN_HPP_NOEXCEPT; + + void setScissorWithCountEXT( ArrayProxy const & scissors ) const + VULKAN_HPP_NOEXCEPT; + + void bindVertexBuffers2EXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + ArrayProxy const & strides + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands === + + void preprocessGeneratedCommandsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + void executeGeneratedCommandsNV( + VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask + VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; + + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const 
VULKAN_HPP_NOEXCEPT; + + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_fragment_shading_rate_enums === + + void setFragmentShadingRateEnumNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_copy_commands2 === + + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const + VULKAN_HPP_NOEXCEPT; + + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const + VULKAN_HPP_NOEXCEPT; + + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void + resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_ray_tracing_pipeline === + + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT; + + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT; + + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_vertex_input_dynamic_state === + + void setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & + vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + void subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_invocation_mask === + + void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state2 === + + void setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT; + + void + setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT; + + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT; + + void + setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const 
VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_color_write_enable === + + void setColorWriteEnableEXT( ArrayProxy const & colorWriteEnables ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_multi_draw === + + void drawMultiEXT( ArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawMultiIndexedEXT( ArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Optional vertexOffset + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CommandBuffers : public std::vector + { + public: + CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + dispatcher->vkAllocateCommandBuffers( static_cast( *device ), + reinterpret_cast( &allocateInfo ), + commandBuffers.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( allocateInfo.commandBufferCount ); + for ( auto const & commandBuffer : commandBuffers ) + { + this->emplace_back( device, commandBuffer, static_cast( allocateInfo.commandPool ) ); + } + } + else + { + throwResultException( result, "vkAllocateCommandBuffers" ); + } + } + + CommandBuffers() = delete; + CommandBuffers( CommandBuffers const & ) = delete; + CommandBuffers( CommandBuffers && rhs ) = default; + CommandBuffers & operator=( CommandBuffers const & ) = delete; + CommandBuffers & operator=( CommandBuffers && rhs ) = default; + }; + + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + CuFunctionNVX( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateCuFunctionNVX( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateCuFunctionNVX" ); + } + } + + CuFunctionNVX( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCuFunctionNVX function, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_function( function ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + CuFunctionNVX( std::nullptr_t ) {} + + ~CuFunctionNVX() + { + if ( 
m_function ) + { + getDispatcher()->vkDestroyCuFunctionNVX( static_cast( m_device ), + static_cast( m_function ), + reinterpret_cast( m_allocator ) ); + } + } + + CuFunctionNVX() = delete; + CuFunctionNVX( CuFunctionNVX const & ) = delete; + CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete; + CuFunctionNVX & operator =( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_function ) + { + getDispatcher()->vkDestroyCuFunctionNVX( static_cast( m_device ), + static_cast( m_function ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_function = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CuFunctionNVX const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_function; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCuModuleNVX( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateCuModuleNVX" ); + } + } + + CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCuModuleNVX module, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_module( module ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + CuModuleNVX( std::nullptr_t 
) {} + + ~CuModuleNVX() + { + if ( m_module ) + { + getDispatcher()->vkDestroyCuModuleNVX( static_cast( m_device ), + static_cast( m_module ), + reinterpret_cast( m_allocator ) ); + } + } + + CuModuleNVX() = delete; + CuModuleNVX( CuModuleNVX const & ) = delete; + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + CuModuleNVX & operator=( CuModuleNVX const & ) = delete; + CuModuleNVX & operator =( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_module ) + { + getDispatcher()->vkDestroyCuModuleNVX( static_cast( m_device ), + static_cast( m_module ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_module = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CuModuleNVX const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_module; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: + DebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateDebugReportCallbackEXT( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_callback ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDebugReportCallbackEXT" ); + } + } + + DebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugReportCallbackEXT callback, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_callback( 
callback ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + {} + + DebugReportCallbackEXT( std::nullptr_t ) {} + + ~DebugReportCallbackEXT() + { + if ( m_callback ) + { + getDispatcher()->vkDestroyDebugReportCallbackEXT( + static_cast( m_instance ), + static_cast( m_callback ), + reinterpret_cast( m_allocator ) ); + } + } + + DebugReportCallbackEXT() = delete; + DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete; + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_callback ) + { + getDispatcher()->vkDestroyDebugReportCallbackEXT( + static_cast( m_instance ), + static_cast( m_callback ), + reinterpret_cast( m_allocator ) ); + } + m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); + m_callback = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_callback; + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_messenger ) ) ); + if ( result != 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDebugUtilsMessengerEXT" ); + } + } + + DebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugUtilsMessengerEXT messenger, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_messenger( messenger ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + {} + + DebugUtilsMessengerEXT( std::nullptr_t ) {} + + ~DebugUtilsMessengerEXT() + { + if ( m_messenger ) + { + getDispatcher()->vkDestroyDebugUtilsMessengerEXT( + static_cast( m_instance ), + static_cast( m_messenger ), + reinterpret_cast( m_allocator ) ); + } + } + + DebugUtilsMessengerEXT() = delete; + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete; + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_messenger ) + { + getDispatcher()->vkDestroyDebugUtilsMessengerEXT( + static_cast( m_instance ), + static_cast( m_messenger ), + reinterpret_cast( m_allocator ) ); + } + m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); + m_messenger = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_messenger; + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DeferredOperationKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + 
VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateDeferredOperationKHR( + static_cast( *device ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_operation ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDeferredOperationKHR" ); + } + } + + DeferredOperationKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDeferredOperationKHR operation, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_operation( operation ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + DeferredOperationKHR( std::nullptr_t ) {} + + ~DeferredOperationKHR() + { + if ( m_operation ) + { + getDispatcher()->vkDestroyDeferredOperationKHR( + static_cast( m_device ), + static_cast( m_operation ), + reinterpret_cast( m_allocator ) ); + } + } + + DeferredOperationKHR() = delete; + DeferredOperationKHR( DeferredOperationKHR const & ) = delete; + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_operation( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete; + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_operation ) + { + getDispatcher()->vkDestroyDeferredOperationKHR( + static_cast( m_device ), + static_cast( m_operation ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_operation = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_operation; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_KHR_deferred_host_operations === + + VULKAN_HPP_NODISCARD uint32_t getMaxConcurrency() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getResult() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + DescriptorPool( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateDescriptorPool( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_descriptorPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDescriptorPool" ); + } + } + + DescriptorPool( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_descriptorPool( descriptorPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + DescriptorPool( std::nullptr_t ) {} + + ~DescriptorPool() + { + if ( m_descriptorPool ) + { + getDispatcher()->vkDestroyDescriptorPool( static_cast( m_device ), + static_cast( m_descriptorPool ), + reinterpret_cast( m_allocator ) ); + } + } + + DescriptorPool() = delete; + DescriptorPool( DescriptorPool const & ) = delete; + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DescriptorPool & operator=( DescriptorPool const & ) = delete; + DescriptorPool & operator =( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_descriptorPool ) + { + getDispatcher()->vkDestroyDescriptorPool( static_cast( m_device ), + static_cast( m_descriptorPool ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorSet descriptorSet, + VkDescriptorPool descriptorPool ) + : m_device( *device ) + , m_descriptorPool( descriptorPool ) + , m_descriptorSet( descriptorSet ) + , m_dispatcher( device.getDispatcher() ) + {} + + DescriptorSet( std::nullptr_t ) {} + + ~DescriptorSet() + { + if ( m_descriptorSet ) + { + getDispatcher()->vkFreeDescriptorSets( static_cast( m_device ), + static_cast( m_descriptorPool ), + 1, + reinterpret_cast( &m_descriptorSet ) ); + } + } + + DescriptorSet() = delete; + DescriptorSet( DescriptorSet const & ) = delete; + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DescriptorSet & operator=( DescriptorSet const & ) = delete; + DescriptorSet & operator =( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_descriptorSet ) + { + getDispatcher()->vkFreeDescriptorSets( static_cast( m_device ), + static_cast( m_descriptorPool ), + 1, + reinterpret_cast( &m_descriptorSet ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); + m_descriptorSet = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorSet const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_1 === + + template + void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_descriptor_update_template === + + template + void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorSets : public 
std::vector + { + public: + DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + dispatcher->vkAllocateDescriptorSets( static_cast( *device ), + reinterpret_cast( &allocateInfo ), + descriptorSets.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( allocateInfo.descriptorSetCount ); + for ( auto const & descriptorSet : descriptorSets ) + { + this->emplace_back( device, descriptorSet, static_cast( allocateInfo.descriptorPool ) ); + } + } + else + { + throwResultException( result, "vkAllocateDescriptorSets" ); + } + } + + DescriptorSets() = delete; + DescriptorSets( DescriptorSets const & ) = delete; + DescriptorSets( DescriptorSets && rhs ) = default; + DescriptorSets & operator=( DescriptorSets const & ) = delete; + DescriptorSets & operator=( DescriptorSets && rhs ) = default; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + DescriptorSetLayout( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateDescriptorSetLayout( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_descriptorSetLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDescriptorSetLayout" ); + } + } + + DescriptorSetLayout( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorSetLayout descriptorSetLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_descriptorSetLayout( descriptorSetLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + DescriptorSetLayout( std::nullptr_t ) {} + + ~DescriptorSetLayout() + { + if ( m_descriptorSetLayout ) + { + getDispatcher()->vkDestroyDescriptorSetLayout( + static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( m_allocator ) ); + } + } + + DescriptorSetLayout() = delete; + DescriptorSetLayout( DescriptorSetLayout const & ) = delete; + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, + {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, 
nullptr ) ) + {} + DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete; + DescriptorSetLayout & operator =( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_descriptorSetLayout ) + { + getDispatcher()->vkDestroyDescriptorSetLayout( + static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_descriptorSetLayout = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: + DescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateDescriptorUpdateTemplate( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_descriptorUpdateTemplate ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDescriptorUpdateTemplate" ); + } + } + + DescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + DescriptorUpdateTemplate( std::nullptr_t ) {} + + ~DescriptorUpdateTemplate() + { + if ( m_descriptorUpdateTemplate ) + { + getDispatcher()->vkDestroyDescriptorUpdateTemplate( + static_cast( m_device ), + static_cast( m_descriptorUpdateTemplate ), + reinterpret_cast( m_allocator ) ); + } + } + + DescriptorUpdateTemplate() = delete; + DescriptorUpdateTemplate( DescriptorUpdateTemplate const & 
) = delete; + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_descriptorUpdateTemplate ) + { + getDispatcher()->vkDestroyDescriptorUpdateTemplate( + static_cast( m_device ), + static_cast( m_descriptorUpdateTemplate ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_descriptorUpdateTemplate = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DeviceMemory + { + public: + using CType = VkDeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + DeviceMemory( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkAllocateMemory( static_cast( *device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_memory ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkAllocateMemory" ); + } + } + + DeviceMemory( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDeviceMemory memory, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_memory( memory ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + DeviceMemory( 
std::nullptr_t ) {} + + ~DeviceMemory() + { + if ( m_memory ) + { + getDispatcher()->vkFreeMemory( static_cast( m_device ), + static_cast( m_memory ), + reinterpret_cast( m_allocator ) ); + } + } + + DeviceMemory() = delete; + DeviceMemory( DeviceMemory const & ) = delete; + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DeviceMemory & operator=( DeviceMemory const & ) = delete; + DeviceMemory & operator =( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_memory ) + { + getDispatcher()->vkFreeMemory( static_cast( m_device ), + static_cast( m_memory ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_memory = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceMemory const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_memory; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD void * + mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + void unmapMemory() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getCommitment() const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VULKAN_HPP_NODISCARD HANDLE + getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_pageable_device_local_memory === + + void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, + int32_t drmFd, + uint32_t connectorId ) + : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() ) + { + 
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          physicalDevice.getDispatcher()->vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( *physicalDevice ),
+                                                              drmFd,
+                                                              connectorId,
+                                                              reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          throwResultException( result, "vkGetDrmDisplayEXT" );
+        }
+      }
+
+#  if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+      DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
+                  Display & dpy,
+                  RROutput rrOutput )
+        : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result =
+          static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT(
+            static_cast<VkPhysicalDevice>( *physicalDevice ),
+            &dpy,
+            rrOutput,
+            reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          throwResultException( result, "vkGetRandROutputDisplayEXT" );
+        }
+      }
+#  endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+      DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
+                  uint32_t deviceRelativeId )
+        : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          physicalDevice.getDispatcher()->vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( *physicalDevice ),
+                                                               deviceRelativeId,
+                                                               reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          throwResultException( result, "vkGetWinrtDisplayNV" );
+        }
+      }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+      DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
+                  VkDisplayKHR display )
+        : m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
+      {}
+
+      DisplayKHR( std::nullptr_t ) {}
+
+      ~DisplayKHR()
+      {
+        if ( m_display )
+        {
+          getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                static_cast<VkDisplayKHR>( m_display ) );
+        }
+      }
+
+      DisplayKHR() = delete;
+      DisplayKHR( DisplayKHR const & ) = delete;
+      DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
+        , m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) )
+        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+      {}
+      DisplayKHR & operator=( DisplayKHR const & ) = delete;
+      DisplayKHR & operator =( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          if ( m_display )
+          {
+            getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                  static_cast<VkDisplayKHR>( m_display ) );
+          }
+          m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} );
+          m_display = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} );
+          m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::DisplayKHR const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_display;
+      }
+
+      VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const
+      {
+        return m_physicalDevice;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      //=== VK_KHR_display ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> getModeProperties() const;
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR createMode(
+        VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+      //=== VK_KHR_get_display_properties2 ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> getModeProperties2() const;
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_NV_acquire_winrt_display ===
+
+      void acquireWinrtNV() const;
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    private:
+      VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
+      VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class DisplayKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
+    {
+    public:
+      DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
+                   uint32_t planeIndex )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher =
+          physicalDevice.getDispatcher();
+        std::vector<VkDisplayKHR> displays;
+        uint32_t displayCount;
+        VULKAN_HPP_NAMESPACE::Result result;
+        do
+        {
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR(
+            static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
+          if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
+          {
+            displays.resize( displayCount );
+            result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR(
+              static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, displays.data() ) );
+          }
+        } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+        if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+          this->reserve( displayCount );
+          for ( auto const & displayKHR : displays )
+          {
+            this->emplace_back( physicalDevice, displayKHR );
+          }
+        }
+        else
+        {
+          throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" );
+        }
+      }
+
+      DisplayKHRs() = delete;
+      DisplayKHRs( DisplayKHRs const & ) = delete;
+      DisplayKHRs( DisplayKHRs && rhs ) = default;
+      DisplayKHRs & operator=( DisplayKHRs const & ) = delete;
+      DisplayKHRs & operator=( DisplayKHRs && rhs ) = default;
+    };
+
+    class DisplayModeKHR
+    {
+    public:
+      using CType = VkDisplayModeKHR;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+        VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+    public:
+      DisplayModeKHR(
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
+        VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_physicalDevice( display.getPhysicalDevice() ), m_dispatcher( display.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result =
+          static_cast<VULKAN_HPP_NAMESPACE::Result>( display.getDispatcher()->vkCreateDisplayModeKHR(
+            static_cast<VkPhysicalDevice>( display.getPhysicalDevice() ),
+            static_cast<VkDisplayKHR>( *display ),
+            reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
+            reinterpret_cast<const VkAllocationCallbacks *>(
+              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+            reinterpret_cast<VkDisplayModeKHR *>( &m_displayModeKHR ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          throwResultException( result, "vkCreateDisplayModeKHR" );
+        }
+      }
+
+      DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const &
display, + VkDisplayModeKHR displayModeKHR ) + : m_physicalDevice( display.getPhysicalDevice() ) + , m_displayModeKHR( displayModeKHR ) + , m_dispatcher( display.getDispatcher() ) + {} + + DisplayModeKHR( std::nullptr_t ) {} + + DisplayModeKHR() = delete; + DisplayModeKHR( DisplayModeKHR const & ) = delete; + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + DisplayModeKHR & operator=( DisplayModeKHR const & ) = delete; + DisplayModeKHR & operator =( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); + m_displayModeKHR = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DisplayModeKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR + getDisplayPlaneCapabilities( uint32_t planeIndex ) const; + + private: + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class Event + { + public: + using CType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateEvent( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_event ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateEvent" ); + } + } + + Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkEvent event, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_event( event ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + Event( std::nullptr_t ) {} + + ~Event() + { + if ( m_event ) + { + getDispatcher()->vkDestroyEvent( static_cast( m_device ), + static_cast( m_event ), + reinterpret_cast( m_allocator ) ); + } + } + + Event() = delete; + Event( Event const & ) = delete; + Event( Event && rhs 
) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Event & operator=( Event const & ) = delete; + Event & operator =( Event && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_event ) + { + getDispatcher()->vkDestroyEvent( static_cast( m_device ), + static_cast( m_event ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_event = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Event const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + void set() const; + + void reset() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Event m_event = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Fence + { + public: + using CType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateFence( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateFence" ); + } + } + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkRegisterDeviceEventEXT( + static_cast( *device ), + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_fence ) ) ); + if ( result != 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkRegisterDeviceEventEXT" ); + } + } + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkRegisterDisplayEventEXT( + static_cast( *device ), + static_cast( *display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkRegisterDisplayEventEXT" ); + } + } + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkFence fence, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_fence( fence ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + Fence( std::nullptr_t ) {} + + ~Fence() + { + if ( m_fence ) + { + getDispatcher()->vkDestroyFence( static_cast( m_device ), + static_cast( m_fence ), + reinterpret_cast( m_allocator ) ); + } + } + + Fence() = delete; + Fence( Fence const & ) = delete; + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Fence & operator=( Fence const & ) = delete; + Fence & operator =( Fence && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_fence ) + { + getDispatcher()->vkDestroyFence( static_cast( m_device ), + static_cast( m_fence ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_fence = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Fence const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Fence m_fence = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + 
VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateFramebuffer( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_framebuffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateFramebuffer" ); + } + } + + Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkFramebuffer framebuffer, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_framebuffer( framebuffer ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + Framebuffer( std::nullptr_t ) {} + + ~Framebuffer() + { + if ( m_framebuffer ) + { + getDispatcher()->vkDestroyFramebuffer( static_cast( m_device ), + static_cast( m_framebuffer ), + reinterpret_cast( m_allocator ) ); + } + } + + Framebuffer() = delete; + Framebuffer( Framebuffer const & ) = delete; + Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Framebuffer & operator=( Framebuffer const & ) = delete; + Framebuffer & operator =( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_framebuffer ) + { + getDispatcher()->vkDestroyFramebuffer( static_cast( m_device ), + static_cast( m_framebuffer ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_framebuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Framebuffer const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Image + { + public: + using CType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: + Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateImage( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_image ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateImage" ); + } + } + + Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkImage image, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_image( image ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + Image( std::nullptr_t ) {} + + ~Image() + { + if ( m_image ) + { + getDispatcher()->vkDestroyImage( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( m_allocator ) ); + } + } + + Image() = delete; + Image( Image const & ) = delete; + Image( Image && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Image & operator=( Image const & ) = delete; + Image & operator =( Image && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_image ) + { + getDispatcher()->vkDestroyImage( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_image = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Image const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getSparseMemoryRequirements() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout + getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT; + + //=== 
VK_EXT_image_drm_format_modifier === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT + getDrmFormatModifierPropertiesEXT() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Image m_image = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class ImageView + { + public: + using CType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateImageView( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_imageView ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateImageView" ); + } + } + + ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkImageView imageView, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_imageView( imageView ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + ImageView( std::nullptr_t ) {} + + ~ImageView() + { + if ( m_imageView ) + { + getDispatcher()->vkDestroyImageView( static_cast( m_device ), + static_cast( m_imageView ), + reinterpret_cast( m_allocator ) ); + } + } + + ImageView() = delete; + ImageView( ImageView const & ) = delete; + ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + ImageView & operator=( ImageView const & ) = delete; + ImageView & operator =( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_imageView ) + { + getDispatcher()->vkDestroyImageView( static_cast( m_device ), + static_cast( m_imageView ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_imageView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ImageView const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher 
const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_NVX_image_view_handle === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectCommandsLayoutNV( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateIndirectCommandsLayoutNV( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_indirectCommandsLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateIndirectCommandsLayoutNV" ); + } + } + + IndirectCommandsLayoutNV( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_indirectCommandsLayout( indirectCommandsLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + IndirectCommandsLayoutNV( std::nullptr_t ) {} + + ~IndirectCommandsLayoutNV() + { + if ( m_indirectCommandsLayout ) + { + getDispatcher()->vkDestroyIndirectCommandsLayoutNV( + static_cast( m_device ), + static_cast( m_indirectCommandsLayout ), + reinterpret_cast( m_allocator ) ); + } + } + + IndirectCommandsLayoutNV() = delete; + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete; + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayout( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_indirectCommandsLayout ) + { + getDispatcher()->vkDestroyIndirectCommandsLayoutNV( + static_cast( m_device ), + static_cast( m_indirectCommandsLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, 
{} ); + m_indirectCommandsLayout = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayout; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) + : m_device( *device ), m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL( + static_cast( *device ), + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &m_configuration ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" ); + } + } + + PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPerformanceConfigurationINTEL configuration ) + : m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() ) + {} + + PerformanceConfigurationINTEL( std::nullptr_t ) {} + + ~PerformanceConfigurationINTEL() + { + if ( m_configuration ) + { + getDispatcher()->vkReleasePerformanceConfigurationINTEL( + static_cast( m_device ), static_cast( m_configuration ) ); + } + } + + PerformanceConfigurationINTEL() = delete; + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete; + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_configuration( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_configuration ) + { + getDispatcher()->vkReleasePerformanceConfigurationINTEL( + 
static_cast( m_device ), static_cast( m_configuration ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_configuration = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_configuration; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class PipelineCache + { + public: + using CType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + PipelineCache( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreatePipelineCache( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipelineCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreatePipelineCache" ); + } + } + + PipelineCache( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_pipelineCache( pipelineCache ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + PipelineCache( std::nullptr_t ) {} + + ~PipelineCache() + { + if ( m_pipelineCache ) + { + getDispatcher()->vkDestroyPipelineCache( static_cast( m_device ), + static_cast( m_pipelineCache ), + reinterpret_cast( m_allocator ) ); + } + } + + PipelineCache() = delete; + PipelineCache( PipelineCache const & ) = delete; + PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + PipelineCache & operator=( PipelineCache const & ) = delete; + PipelineCache & operator =( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_pipelineCache ) + { + getDispatcher()->vkDestroyPipelineCache( static_cast( m_device ), + 
static_cast( m_pipelineCache ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_pipelineCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD std::vector getData() const; + + void merge( ArrayProxy const & srcCaches ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + Pipeline( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + m_constructorSuccessCode = static_cast( getDispatcher()->vkCreateComputePipelines( + static_cast( *device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipeline ) ) ); + if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" ); + } + } + + Pipeline( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + m_constructorSuccessCode = + static_cast( getDispatcher()->vkCreateGraphicsPipelines( + static_cast( *device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipeline ) ) ); + if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" ); + } + } + + Pipeline( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional< + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + m_constructorSuccessCode = + static_cast( getDispatcher()->vkCreateRayTracingPipelinesKHR( + static_cast( *device ), + deferredOperation ? static_cast( **deferredOperation ) : 0, + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipeline ) ) ); + if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" ); + } + } + + Pipeline( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + m_constructorSuccessCode = + static_cast( getDispatcher()->vkCreateRayTracingPipelinesNV( + static_cast( *device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipeline ) ) ); + if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" ); + } + } + + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipeline pipeline, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( *device ) + , m_pipeline( pipeline ) + , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) + , m_dispatcher( device.getDispatcher() ) + {} + + Pipeline( std::nullptr_t ) {} + + ~Pipeline() + { + if ( m_pipeline ) + { + getDispatcher()->vkDestroyPipeline( static_cast( m_device ), + static_cast( m_pipeline ), + reinterpret_cast( m_allocator ) ); + } + } + + Pipeline() = delete; + Pipeline( Pipeline const & ) = delete; + Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Pipeline & operator=( Pipeline const & ) = delete; + Pipeline & operator =( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_pipeline ) + { + getDispatcher()->vkDestroyPipeline( static_cast( m_device ), + static_cast( m_pipeline ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_pipeline = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_AMD_shader_info === + + VULKAN_HPP_NODISCARD std::vector + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const; + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD std::vector + getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; + + template + VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const; + + void compileDeferredNV( uint32_t shader ) const; + + //=== VK_KHR_ray_tracing_pipeline === + + template + VULKAN_HPP_NODISCARD std::vector + getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t 
groupCount, size_t dataSize ) const; + + template + VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; + + template + VULKAN_HPP_NODISCARD std::vector getRayTracingCaptureReplayShaderGroupHandlesKHR( + uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; + + template + VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, + uint32_t groupCount ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getRayTracingShaderGroupStackSizeKHR( + uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Pipelines : public std::vector + { + public: + Pipelines( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( dispatcher->vkCreateComputePipelines( + static_cast( *device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + pipelines.data() ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + this->reserve( createInfos.size() ); + for ( auto const & pipeline : pipelines ) + { + this->emplace_back( device, pipeline, allocator, result ); + } + } + else + { + throwResultException( result, "vkCreateComputePipelines" ); + } + } + + Pipelines( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( dispatcher->vkCreateGraphicsPipelines( + static_cast( *device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + pipelines.data() ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + this->reserve( createInfos.size() ); + for ( auto const & pipeline : pipelines ) + { + this->emplace_back( device, pipeline, allocator, result ); + } + } + else + { + throwResultException( result, "vkCreateGraphicsPipelines" ); + } + } + + Pipelines( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional< + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( dispatcher->vkCreateRayTracingPipelinesKHR( + static_cast( *device ), + deferredOperation ? static_cast( **deferredOperation ) : 0, + pipelineCache ? static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + pipelines.data() ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + this->reserve( createInfos.size() ); + for ( auto const & pipeline : pipelines ) + { + this->emplace_back( device, pipeline, allocator, result ); + } + } + else + { + throwResultException( result, "vkCreateRayTracingPipelinesKHR" ); + } + } + + Pipelines( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( dispatcher->vkCreateRayTracingPipelinesNV( + static_cast( *device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + pipelines.data() ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || + ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + this->reserve( createInfos.size() ); + for ( auto const & pipeline : pipelines ) + { + this->emplace_back( device, pipeline, allocator, result ); + } + } + else + { + throwResultException( result, "vkCreateRayTracingPipelinesNV" ); + } + } + + Pipelines() = delete; + Pipelines( Pipelines const & ) = delete; + Pipelines( Pipelines && rhs ) = default; + Pipelines & operator=( Pipelines const & ) = delete; + Pipelines & operator=( Pipelines && rhs ) = default; + }; + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + PipelineLayout( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreatePipelineLayout( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipelineLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreatePipelineLayout" ); + } + } + + PipelineLayout( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineLayout pipelineLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_pipelineLayout( pipelineLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + PipelineLayout( std::nullptr_t ) {} + + ~PipelineLayout() + { + if ( m_pipelineLayout ) + { + getDispatcher()->vkDestroyPipelineLayout( static_cast( m_device ), + static_cast( m_pipelineLayout ), + reinterpret_cast( m_allocator ) ); + } + } + + PipelineLayout() = delete; + PipelineLayout( PipelineLayout const & ) = delete; + PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + PipelineLayout & operator=( PipelineLayout const & ) = delete; + PipelineLayout & operator =( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_pipelineLayout ) + { + getDispatcher()->vkDestroyPipelineLayout( static_cast( m_device ), + static_cast( m_pipelineLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_pipelineLayout = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class PrivateDataSlot + { + public: + using CType = VkPrivateDataSlot; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PrivateDataSlot( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreatePrivateDataSlot( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_privateDataSlot ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreatePrivateDataSlot" ); + } + } + + PrivateDataSlot( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPrivateDataSlot privateDataSlot, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_privateDataSlot( privateDataSlot ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + PrivateDataSlot( std::nullptr_t ) {} + + ~PrivateDataSlot() + { + if ( m_privateDataSlot ) + { + getDispatcher()->vkDestroyPrivateDataSlot( static_cast( m_device ), + static_cast( m_privateDataSlot ), + reinterpret_cast( m_allocator ) ); + } + } + + PrivateDataSlot() = delete; + PrivateDataSlot( PrivateDataSlot const & ) = delete; + PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete; + PrivateDataSlot & operator =( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_privateDataSlot ) + { + getDispatcher()->vkDestroyPrivateDataSlot( 
static_cast( m_device ), + static_cast( m_privateDataSlot ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_privateDataSlot = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class QueryPool + { + public: + using CType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateQueryPool( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_queryPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateQueryPool" ); + } + } + + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkQueryPool queryPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_queryPool( queryPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + QueryPool( std::nullptr_t ) {} + + ~QueryPool() + { + if ( m_queryPool ) + { + getDispatcher()->vkDestroyQueryPool( static_cast( m_device ), + static_cast( m_queryPool ), + reinterpret_cast( m_allocator ) ); + } + } + + QueryPool() = delete; + QueryPool( QueryPool const & ) = delete; + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + QueryPool & operator=( QueryPool const & ) = delete; + QueryPool & operator =( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_queryPool ) + { + 
getDispatcher()->vkDestroyQueryPool( static_cast( m_device ), + static_cast( m_queryPool ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_queryPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD std::pair> + getResults( uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + template + VULKAN_HPP_NODISCARD std::pair + getResult( uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_2 === + + void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_host_query_reset === + + void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Queue + { + public: + using CType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: + Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + uint32_t queueFamilyIndex, + uint32_t queueIndex ) + : m_dispatcher( device.getDispatcher() ) + { + getDispatcher()->vkGetDeviceQueue( + static_cast( *device ), queueFamilyIndex, queueIndex, reinterpret_cast( &m_queue ) ); + } + + Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) + : m_dispatcher( device.getDispatcher() ) + { + getDispatcher()->vkGetDeviceQueue2( static_cast( *device ), + reinterpret_cast( &queueInfo ), + reinterpret_cast( &m_queue ) ); + } + + Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) + : m_queue( queue ), m_dispatcher( device.getDispatcher() ) + {} + + Queue( std::nullptr_t ) {} + + Queue() = delete; + Queue( Queue const & ) = delete; + Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT + : m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Queue & operator=( Queue const & ) 
= delete; + Queue & operator =( Queue && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + m_queue = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + void submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + void waitIdle() const; + + void bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_3 === + + void submit2( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const; + + //=== VK_EXT_debug_utils === + + void + beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_diagnostic_checkpoints === + + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV() const VULKAN_HPP_NOEXCEPT; + + //=== VK_INTEL_performance_query === + + void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const; + + //=== VK_KHR_synchronization2 === + + void submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Queue m_queue = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class RenderPass + { + public: + using CType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateRenderPass( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateRenderPass" ); + } + } + + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & 
createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateRenderPass2( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateRenderPass2" ); + } + } + + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkRenderPass renderPass, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_renderPass( renderPass ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + RenderPass( std::nullptr_t ) {} + + ~RenderPass() + { + if ( m_renderPass ) + { + getDispatcher()->vkDestroyRenderPass( static_cast( m_device ), + static_cast( m_renderPass ), + reinterpret_cast( m_allocator ) ); + } + } + + RenderPass() = delete; + RenderPass( RenderPass const & ) = delete; + RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + RenderPass & operator=( RenderPass const & ) = delete; + RenderPass & operator =( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_renderPass ) + { + getDispatcher()->vkDestroyRenderPass( static_cast( m_device ), + static_cast( m_renderPass ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_renderPass = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + VULKAN_HPP_NODISCARD std::pair + getSubpassShadingMaxWorkgroupSizeHUAWEI() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Sampler + { + public: + using CType = VkSampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateSampler( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_sampler ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateSampler" ); + } + } + + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSampler sampler, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_sampler( sampler ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + Sampler( std::nullptr_t ) {} + + ~Sampler() + { + if ( m_sampler ) + { + getDispatcher()->vkDestroySampler( static_cast( m_device ), + static_cast( m_sampler ), + reinterpret_cast( m_allocator ) ); + } + } + + Sampler() = delete; + Sampler( Sampler const & ) = delete; + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + Sampler & operator=( Sampler const & ) = delete; + Sampler & operator =( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_sampler ) + { + getDispatcher()->vkDestroySampler( static_cast( m_device ), + static_cast( m_sampler ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_sampler = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT 
debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + SamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateSamplerYcbcrConversion( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_ycbcrConversion ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateSamplerYcbcrConversion" ); + } + } + + SamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSamplerYcbcrConversion ycbcrConversion, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_ycbcrConversion( ycbcrConversion ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + SamplerYcbcrConversion( std::nullptr_t ) {} + + ~SamplerYcbcrConversion() + { + if ( m_ycbcrConversion ) + { + getDispatcher()->vkDestroySamplerYcbcrConversion( + static_cast( m_device ), + static_cast( m_ycbcrConversion ), + reinterpret_cast( m_allocator ) ); + } + } + + SamplerYcbcrConversion() = delete; + SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete; + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_ycbcrConversion ) + { + getDispatcher()->vkDestroySamplerYcbcrConversion( + static_cast( m_device ), + static_cast( m_ycbcrConversion ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_ycbcrConversion = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_ycbcrConversion; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + 
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class Semaphore
+    {
+    public:
+      using CType = VkSemaphore;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+        VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
+
+    public:
+      Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                 VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
+                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( *device )
+        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+        , m_dispatcher( device.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          device.getDispatcher()->vkCreateSemaphore( static_cast<VkDevice>( *device ),
+                                                     reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
+                                                     reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+                                                     reinterpret_cast<VkSemaphore *>( &m_semaphore ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          throwResultException( result, "vkCreateSemaphore" );
+        }
+      }
+
+      Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                 VkSemaphore semaphore,
+                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( *device )
+        , m_semaphore( semaphore )
+        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+        , m_dispatcher( device.getDispatcher() )
+      {}
+
+      Semaphore( std::nullptr_t ) {}
+
+      ~Semaphore()
+      {
+        if ( m_semaphore )
+        {
+          getDispatcher()->vkDestroySemaphore( static_cast<VkDevice>( m_device ),
+                                               static_cast<VkSemaphore>( m_semaphore ),
+                                               reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      }
+
+      Semaphore()                    = delete;
+      Semaphore( Semaphore const & ) = delete;
+      Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+        , m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) )
+        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+      {}
+      Semaphore & operator=( Semaphore const & ) = delete;
+      Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          if ( m_semaphore )
+          {
+            getDispatcher()->vkDestroySemaphore( static_cast<VkDevice>( m_device ),
+                                                 static_cast<VkSemaphore>( m_semaphore ),
+                                                 reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+          }
+          m_device     = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_semaphore  = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} );
+          m_allocator  = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+          m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_semaphore;
+      }
+
+      VULKAN_HPP_NAMESPACE::Device getDevice() const
+      {
+        return m_device;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      //=== VK_VERSION_1_2 ===
+
+      VULKAN_HPP_NODISCARD uint64_t getCounterValue() const;
+
+      //=== VK_KHR_timeline_semaphore ===
+
+      VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const;
+
+    private:
+      VULKAN_HPP_NAMESPACE::Device m_device = {};
+      VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {};
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class ShaderModule
+    {
+    public:
+      using CType = VkShaderModule;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
+        VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+
+    public:
+      ShaderModule(
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+        VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( *device )
+        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+        , m_dispatcher( device.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result =
+          static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateShaderModule(
+            static_cast<VkDevice>( *device ),
+            reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+            reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+            reinterpret_cast<VkShaderModule *>( &m_shaderModule ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          throwResultException( result, "vkCreateShaderModule" );
+        }
+      }
+
+      ShaderModule(
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+        VkShaderModule shaderModule,
+        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( *device )
+        , m_shaderModule( shaderModule )
+        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+        , m_dispatcher( device.getDispatcher() )
+      {}
+
+      ShaderModule( std::nullptr_t ) {}
+
+      ~ShaderModule()
+      {
+        if ( m_shaderModule )
+        {
+          getDispatcher()->vkDestroyShaderModule( static_cast<VkDevice>( m_device ),
+                                                  static_cast<VkShaderModule>( m_shaderModule ),
+                                                  reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+      }
+
+      ShaderModule()                       = delete;
+      ShaderModule( ShaderModule const & ) = delete;
+      ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+        , m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) )
+        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+      {}
+      ShaderModule & operator=( ShaderModule const & ) = delete;
+      ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          if ( m_shaderModule )
+          {
+            getDispatcher()->vkDestroyShaderModule( static_cast<VkDevice>( m_device ),
+                                                    static_cast<VkShaderModule>( m_shaderModule ),
+                                                    reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+          }
+          m_device       = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
+          m_shaderModule = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} );
+          m_allocator    = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
+          m_dispatcher   = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_shaderModule;
+      }
+
+      VULKAN_HPP_NAMESPACE::Device getDevice() const
+      {
+        return m_device;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT(
m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateAndroidSurfaceKHR( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateAndroidSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDirectFBSurfaceEXT" ); + } + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" ); + } + } + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( 
+ static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateHeadlessSurfaceEXT" ); + } + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateIOSSurfaceMVK( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateIOSSurfaceMVK" ); + } + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" ); + } + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateMacOSSurfaceMVK( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateMacOSSurfaceMVK" ); + } + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateMetalSurfaceEXT( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateMetalSurfaceEXT" ); + } + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + SurfaceKHR( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateScreenSurfaceQNX( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateScreenSurfaceQNX" ); + } + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_GGP ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" ); + } + } +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateViSurfaceNN( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateViSurfaceNN" ); + } + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateWaylandSurfaceKHR( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateWaylandSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + 
static_cast( instance.getDispatcher()->vkCreateWin32SurfaceKHR( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateWin32SurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateXcbSurfaceKHR( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateXcbSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( instance.getDispatcher()->vkCreateXlibSurfaceKHR( + static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateXlibSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkSurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_surface( surface ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + {} + + SurfaceKHR( std::nullptr_t ) {} + + ~SurfaceKHR() + { + if ( m_surface ) + { + getDispatcher()->vkDestroySurfaceKHR( static_cast( m_instance ), + static_cast( m_surface ), + reinterpret_cast( m_allocator ) ); + } + } + + SurfaceKHR() = delete; + SurfaceKHR( SurfaceKHR const & ) = delete; + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + SurfaceKHR & operator=( SurfaceKHR const & ) = delete; + SurfaceKHR & operator =( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_surface ) + { + getDispatcher()->vkDestroySurfaceKHR( static_cast( m_instance ), + static_cast( m_surface ), + reinterpret_cast( m_allocator ) ); + } + m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); + m_surface = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ); + m_allocator = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_surface; + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + SwapchainKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateSwapchainKHR( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateSwapchainKHR" ); + } + } + + SwapchainKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_swapchain( swapchain ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + SwapchainKHR( std::nullptr_t ) {} + + ~SwapchainKHR() + { + if ( m_swapchain ) + { + getDispatcher()->vkDestroySwapchainKHR( static_cast( m_device ), + static_cast( m_swapchain ), + reinterpret_cast( m_allocator ) ); + } + } + + SwapchainKHR() = delete; + SwapchainKHR( SwapchainKHR const & ) = delete; + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + SwapchainKHR & operator=( SwapchainKHR const & ) = delete; + SwapchainKHR & operator =( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_swapchain ) + { + getDispatcher()->vkDestroySwapchainKHR( static_cast( m_device ), + static_cast( m_swapchain ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_swapchain = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_swapchain; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD std::vector getImages() const; + + VULKAN_HPP_NODISCARD std::pair + acquireNextImage( uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_EXT_display_control === + + VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const; + + //=== VK_GOOGLE_display_timing === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const; + + VULKAN_HPP_NODISCARD std::vector + getPastPresentationTimingGOOGLE() const; + + //=== VK_KHR_shared_presentable_image === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + //=== VK_AMD_display_native_hdr === + + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_present_wait === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + void acquireFullScreenExclusiveModeEXT() const; + + void releaseFullScreenExclusiveModeEXT() const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class SwapchainKHRs : public std::vector + { + public: + SwapchainKHRs( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( dispatcher->vkCreateSharedSwapchainsKHR( + static_cast( *device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( + static_cast( allocator ) ), + swapchains.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( createInfos.size() ); + for ( auto const & swapchainKHR : swapchains ) + { + this->emplace_back( device, swapchainKHR, allocator ); + } + } + else + { + throwResultException( result, "vkCreateSharedSwapchainsKHR" ); + } + } + + SwapchainKHRs() = delete; + SwapchainKHRs( SwapchainKHRs const & ) = delete; + SwapchainKHRs( SwapchainKHRs && rhs ) = default; + SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete; + 
SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + ValidationCacheEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateValidationCacheEXT( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_validationCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateValidationCacheEXT" ); + } + } + + ValidationCacheEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkValidationCacheEXT validationCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_validationCache( validationCache ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + ValidationCacheEXT( std::nullptr_t ) {} + + ~ValidationCacheEXT() + { + if ( m_validationCache ) + { + getDispatcher()->vkDestroyValidationCacheEXT( + static_cast( m_device ), + static_cast( m_validationCache ), + reinterpret_cast( m_allocator ) ); + } + } + + ValidationCacheEXT() = delete; + ValidationCacheEXT( ValidationCacheEXT const & ) = delete; + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete; + ValidationCacheEXT & operator =( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_validationCache ) + { + getDispatcher()->vkDestroyValidationCacheEXT( + static_cast( m_device ), + static_cast( m_validationCache ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_validationCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_validationCache; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( 
m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_EXT_validation_cache === + + void merge( ArrayProxy const & srcCaches ) const; + + VULKAN_HPP_NODISCARD std::vector getData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateVideoSessionKHR( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_videoSession ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateVideoSessionKHR" ); + } + } + + VideoSessionKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkVideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_videoSession( videoSession ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + VideoSessionKHR( std::nullptr_t ) {} + + ~VideoSessionKHR() + { + if ( m_videoSession ) + { + getDispatcher()->vkDestroyVideoSessionKHR( static_cast( m_device ), + static_cast( m_videoSession ), + reinterpret_cast( m_allocator ) ); + } + } + + VideoSessionKHR() = delete; + VideoSessionKHR( VideoSessionKHR const & ) = delete; + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete; + VideoSessionKHR & operator =( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_videoSession ) + { + getDispatcher()->vkDestroyVideoSessionKHR( static_cast( m_device ), + static_cast( m_videoSession ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_videoSession = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( 
rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSession; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD std::vector getMemoryRequirements() const; + + void + bindMemory( ArrayProxy const & videoSessionBindMemories ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = + VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateVideoSessionParametersKHR( + static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_videoSessionParameters ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateVideoSessionParametersKHR" ); + } + } + + VideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkVideoSessionParametersKHR videoSessionParameters, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_videoSessionParameters( videoSessionParameters ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + {} + + VideoSessionParametersKHR( std::nullptr_t ) {} + + ~VideoSessionParametersKHR() + { + if ( m_videoSessionParameters ) + { + getDispatcher()->vkDestroyVideoSessionParametersKHR( + static_cast( m_device ), + static_cast( m_videoSessionParameters ), + reinterpret_cast( m_allocator ) ); + } + } + + VideoSessionParametersKHR() = delete; + VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete; + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSessionParameters( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + {} + VideoSessionParametersKHR & 
operator=( VideoSessionParametersKHR const & ) = delete; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + if ( m_videoSessionParameters ) + { + getDispatcher()->vkDestroyVideoSessionParametersKHR( + static_cast( m_device ), + static_cast( m_videoSessionParameters ), + reinterpret_cast( m_allocator ) ); + } + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_videoSessionParameters = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParameters; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + //=== VK_KHR_video_queue === + + void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = nullptr; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance Context::createInstance( + VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Instance( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Instance::enumeratePhysicalDevices() const + { + return VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices( *this ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + return features; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), + static_cast( format ), + reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties 
imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( + static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + } + return imageFormatProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + getDispatcher()->vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties() const VULKAN_HPP_NOEXCEPT + { + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( + static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( + static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount == queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction + Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + return getDispatcher()->vkGetInstanceProcAddr( static_cast( m_instance ), name.c_str() ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction + Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + return getDispatcher()->vkGetDeviceProcAddr( static_cast( m_device ), name.c_str() ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device PhysicalDevice::createDevice( + VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Device( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Context::enumerateInstanceExtensionProperties( Optional layerName ) const + { + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateInstanceExtensionProperties( + layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName ) const + { + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast( m_physicalDevice ), + layerName ? layerName->c_str() : nullptr, + &propertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), + layerName ? layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Context::enumerateInstanceLayerProperties() const + { + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateInstanceLayerProperties( + &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::enumerateDeviceLayerProperties() const + { + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( 
properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueFamilyIndex, queueIndex ); + } + + VULKAN_HPP_INLINE void Queue::submit( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueueSubmit( static_cast( m_queue ), + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + } + } + + VULKAN_HPP_INLINE void Queue::waitIdle() const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueueWaitIdle( static_cast( m_queue ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + } + } + + VULKAN_HPP_INLINE void Device::waitIdle() const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkDeviceWaitIdle( static_cast( m_device ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory Device::allocateMemory( + VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, allocateInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * + DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const + { + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMapMemory( static_cast( m_device ), + static_cast( m_memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); + } + return pData; + } + + VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkUnmapMemory( static_cast( m_device ), static_cast( m_memory ) ); + } + + VULKAN_HPP_INLINE void Device::flushMappedMemoryRanges( + ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkFlushMappedMemoryRanges( + static_cast( m_device ), + memoryRanges.size(), + reinterpret_cast( memoryRanges.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + } + } + + VULKAN_HPP_INLINE void Device::invalidateMappedMemoryRanges( + ArrayProxy const & 
memoryRanges ) const + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkInvalidateMappedMemoryRanges( + static_cast( m_device ), + memoryRanges.size(), + reinterpret_cast( memoryRanges.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + getDispatcher()->vkGetDeviceMemoryCommitment( static_cast( m_device ), + static_cast( m_memory ), + reinterpret_cast( &committedMemoryInBytes ) ); + return committedMemoryInBytes; + } + + VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindBufferMemory( static_cast( m_device ), + static_cast( m_buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); + } + } + + VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindImageMemory( static_cast( m_device ), + static_cast( m_image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements( static_cast( m_device ), + static_cast( m_buffer ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements + Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Image::getSparseMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), &sparseMemoryRequirementCount, nullptr ); + std::vector sparseMemoryRequirements( + sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements( + static_cast( m_device ), + static_cast( m_image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + 
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const + VULKAN_HPP_NOEXCEPT + { + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( + static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + std::vector properties( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( + static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount == properties.size() ); + return properties; + } + + VULKAN_HPP_INLINE void Queue::bindSparse( ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueueBindSparse( static_cast( m_queue ), + bindInfo.size(), + reinterpret_cast( bindInfo.data() ), + static_cast( fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::createFence( + VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::resetFences( ArrayProxy const & fences ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetFences( + static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetFenceStatus( static_cast( m_device ), static_cast( m_fence ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus" ); + } + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitForFences( ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitForFences( static_cast( m_device ), + fences.size(), + reinterpret_cast( fences.data() ), + static_cast( waitAll ), + timeout ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences" ); + } + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore Device::createSemaphore( + VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event Device::createEvent( + 
VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Event( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eEventSet ) &&
+           ( result != VULKAN_HPP_NAMESPACE::Result::eEventReset ) )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus" );
+      }
+      return result;
+    }
+
+    VULKAN_HPP_INLINE void Event::set() const
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
+      }
+    }
+
+    VULKAN_HPP_INLINE void Event::reset() const
+    {
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
+      }
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool Device::createQueryPool(
+      VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, createInfo, allocator );
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>>
+      QueryPool::getResults( uint32_t firstQuery,
+                             uint32_t queryCount,
+                             size_t dataSize,
+                             VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                             VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
+    {
+      VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+      std::vector<DataType> data( dataSize / sizeof( DataType ) );
+      Result result =
+        static_cast<Result>( getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
+                                                                     static_cast<VkQueryPool>( m_queryPool ),
+                                                                     firstQuery,
+                                                                     queryCount,
+                                                                     data.size() * sizeof( DataType ),
+                                                                     reinterpret_cast<void *>( data.data() ),
+                                                                     static_cast<VkDeviceSize>( stride ),
+                                                                     static_cast<VkQueryResultFlags>( flags ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
+           ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults" );
+      }
+      return std::make_pair( result, data );
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, DataType>
+      QueryPool::getResult( uint32_t firstQuery,
+                            uint32_t queryCount,
+                            VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                            VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
+    {
+      DataType data;
+      Result result =
+        static_cast<Result>( getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
+                                                                     static_cast<VkQueryPool>( m_queryPool ),
+                                                                     firstQuery,
+                                                                     queryCount,
+                                                                     sizeof( DataType ),
+                                                                     reinterpret_cast<void *>( &data ),
+                                                                     static_cast<VkDeviceSize>( stride ),
+                                                                     static_cast<VkQueryResultFlags>( flags ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
+           ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult" );
+      }
+      return std::make_pair( result, data );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer Device::createBuffer(
+      VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return
VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView Device::createBufferView( + VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image Device::createImage( + VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Image( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Image::getSubresourceLayout( + const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + getDispatcher()->vkGetImageSubresourceLayout( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + return layout; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView Device::createImageView( + VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule Device::createShaderModule( + VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache Device::createPipelineCache( + VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PipelineCache::getData() const + { + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineCacheData( + static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + getDispatcher()->vkGetPipelineCacheData( static_cast( m_device ), + static_cast( m_pipelineCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + } + return data; + } + + VULKAN_HPP_INLINE void + PipelineCache::merge( ArrayProxy const & srcCaches ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMergePipelineCaches( static_cast( m_device ), + static_cast( m_pipelineCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING 
"::PipelineCache::merge" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createComputePipeline( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout Device::createPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler Device::createSampler( + VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout + Device::createDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool Device::createDescriptorPool( + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void + DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkResetDescriptorPool( static_cast( m_device ), + static_cast( m_descriptorPool ), + static_cast( flags ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorSets( *this, allocateInfo ); + } + + VULKAN_HPP_INLINE void Device::updateDescriptorSets( + ArrayProxy const & descriptorWrites, + ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + 
getDispatcher()->vkUpdateDescriptorSets( + static_cast( m_device ), + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Framebuffer Device::createFramebuffer( + VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass( + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderAreaGranularity( static_cast( m_device ), + static_cast( m_renderPass ), + reinterpret_cast( &granularity ) ); + return granularity; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool Device::createCommandPool( + VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkResetCommandPool( static_cast( m_device ), + static_cast( m_commandPool ), + static_cast( flags ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const + { + return VULKAN_HPP_RAII_NAMESPACE::CommandBuffers( *this, allocateInfo ); + } + + VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBeginCommandBuffer( static_cast( m_commandBuffer ), + reinterpret_cast( &beginInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::end() const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkResetCommandBuffer( + static_cast( m_commandBuffer ), static_cast( flags ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + } + } + + VULKAN_HPP_INLINE void + CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + 
VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdBindPipeline( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                          static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+                                          static_cast<VkPipeline>( pipeline ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setViewport(
+      uint32_t firstViewport,
+      ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         firstViewport,
+                                         viewports.size(),
+                                         reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setScissor(
+      uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                        firstScissor,
+                                        scissors.size(),
+                                        reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor,
+                                                        float depthBiasClamp,
+                                                        float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                          depthBiasConstantFactor,
+                                          depthBiasClamp,
+                                          depthBiasSlopeFactor );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds,
+                                                          float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetDepthBounds(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                                 uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetStencilCompareMask(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                               uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetStencilWriteMask(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                               uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdSetStencilReference(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                         VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                                         uint32_t firstSet,
+                                         ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+                                         ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+                                                static_cast<VkPipelineLayout>( layout ),
+                                                firstSet,
+                                                descriptorSets.size(),
+                                                reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
+                                                dynamicOffsets.size(),
+                                                dynamicOffsets.data() );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                      VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      getDispatcher()->vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                             static_cast<VkBuffer>( buffer ),
+                                             static_cast<VkDeviceSize>( offset ),
static_cast( indexType ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDraw( + static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDrawIndexed( static_cast( m_commandBuffer ), + indexCount, + instanceCount, + firstIndex, + vertexOffset, + firstInstance ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDrawIndirect( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + drawCount, + stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDrawIndexedIndirect( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + drawCount, + stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDispatch( + static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDispatchIndirect( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyBuffer( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyImage( + VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( 
dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdBlitImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( + VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdUpdateBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdClearColorImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdClearDepthStencilImage( + static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( + ArrayProxy const & attachments, + ArrayProxy 
const & rects ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdClearAttachments( static_cast( m_commandBuffer ), + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( + VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdResolveImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetEvent( static_cast( m_commandBuffer ), + static_cast( event ), + static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdResetEvent( static_cast( m_commandBuffer ), + static_cast( event ), + static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( + ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdWaitEvents( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + ArrayProxy const & memoryBarriers, + ArrayProxy const & bufferMemoryBarriers, + ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdPipelineBarrier( + static_cast( m_commandBuffer ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdBeginQuery( static_cast( m_commandBuffer ), + static_cast( queryPool ), + query, + static_cast( flags ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdEndQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query ); + } + + 
VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdResetQueryPool( static_cast( m_commandBuffer ), + static_cast( queryPool ), + firstQuery, + queryCount ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdWriteTimestamp( static_cast( m_commandBuffer ), + static_cast( pipelineStage ), + static_cast( queryPool ), + query ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast( values.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdNextSubpass( static_cast( m_commandBuffer ), + static_cast( contents ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( + ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdExecuteCommands( static_cast( m_commandBuffer ), + commandBuffers.size(), + reinterpret_cast( commandBuffers.data() ) ); + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const + { + uint32_t apiVersion; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); + } + return apiVersion; + } + + VULKAN_HPP_INLINE void + Device::bindBufferMemory2( ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindBufferMemory2( static_cast( m_device ), + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + if ( result != 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } + } + + VULKAN_HPP_INLINE void + Device::bindImageMemory2( ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindImageMemory2( static_cast( m_device ), + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( + static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } + + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDispatchBase( static_cast( m_commandBuffer ), + baseGroupX, + baseGroupY, + baseGroupZ, + groupCountX, + groupCountY, + groupCountZ ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Instance::enumeratePhysicalDeviceGroups() const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceGroups( + static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceGroups( + static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + } + return physicalDeviceGroupProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements2( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
StructureChain Device::getImageMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + std::vector sparseMemoryRequirements( + sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + 
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), + static_cast( format ), + reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), + static_cast( format ), + reinterpret_cast( &formatProperties ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( + static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } + return imageFormatProperties; + } + + template + VULKAN_HPP_NODISCARD StructureChain PhysicalDevice::getImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( + static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + } + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2() const VULKAN_HPP_NOEXCEPT + { + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( + static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( + static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount == queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + uint32_t queueFamilyPropertyCount; + 
getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( + static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( + static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( + static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( + static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT + { + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( + static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + std::vector properties( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( + static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount == properties.size() ); + return properties; + } + + VULKAN_HPP_INLINE void + CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkTrimCommandPool( static_cast( m_device ), + static_cast( m_commandPool ), + static_cast( flags ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue + Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueInfo ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion + Device::createSamplerYcbcrConversion( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate + Device::createDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return 
VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator ); + } + + template + VULKAN_HPP_INLINE void + DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkUpdateDescriptorSetWithTemplate( + static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & + externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupport( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupport( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupport( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + 
getDispatcher()->vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2( + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( + const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdNextSubpass2( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdEndRenderPass2( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassEndInfo ) ); + } + + VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkResetQueryPool( + static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const + { + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetSemaphoreCounterValue( + static_cast( m_device ), static_cast( m_semaphore ), &value ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); + } + return value; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWaitSemaphores( + static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores" ); + } + return result; + } + + VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + { + VULKAN_HPP_NAMESPACE::Result 
result = + static_cast( getDispatcher()->vkSignalSemaphore( + static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + return static_cast( getDispatcher()->vkGetBufferDeviceAddress( + static_cast( m_device ), reinterpret_cast( &info ) ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + return getDispatcher()->vkGetBufferOpaqueCaptureAddress( + static_cast( m_device ), reinterpret_cast( &info ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( + const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + return getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( + static_cast( m_device ), reinterpret_cast( &info ) ); + } + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getToolProperties() const + { + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), + &toolCount, + reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + } + return toolProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot Device::createPrivateDataSlot( + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetPrivateData( static_cast( m_device ), + static_cast( objectType_ ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + uint64_t data; + 
getDispatcher()->vkGetPrivateData( static_cast( m_device ), + static_cast( objectType_ ), + objectHandle, + static_cast( privateDataSlot ), + &data ); + return data; + } + + VULKAN_HPP_INLINE void + CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetEvent2( static_cast( m_commandBuffer ), + static_cast( event ), + reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdResetEvent2( static_cast( m_commandBuffer ), + static_cast( event ), + static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2( ArrayProxy const & events, + ArrayProxy const & dependencyInfos ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdWaitEvents2( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), + reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdWriteTimestamp2( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( queryPool ), + query ); + } + + VULKAN_HPP_INLINE void Queue::submit2( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueueSubmit2( static_cast( m_queue ), + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( + const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyBuffer2( static_cast( m_commandBuffer ), + reinterpret_cast( ©BufferInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyImage2( static_cast( m_commandBuffer ), + reinterpret_cast( ©ImageInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( + const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdCopyBufferToImage2( + static_cast( m_commandBuffer ), + reinterpret_cast( ©BufferToImageInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( + const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + 
getDispatcher()->vkCmdCopyImageToBuffer2( + static_cast( m_commandBuffer ), + reinterpret_cast( ©ImageToBufferInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdBlitImage2( static_cast( m_commandBuffer ), + reinterpret_cast( &blitImageInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( + const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdResolveImage2( static_cast( m_commandBuffer ), + reinterpret_cast( &resolveImageInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( + const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdBeginRendering( static_cast( m_commandBuffer ), + reinterpret_cast( &renderingInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdEndRendering( static_cast( m_commandBuffer ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetCullMode( static_cast( m_commandBuffer ), + static_cast( cullMode ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetFrontFace( static_cast( m_commandBuffer ), + static_cast( frontFace ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( + VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), + static_cast( primitiveTopology ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( + ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), + viewports.size(), + reinterpret_cast( viewports.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( + ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetScissorWithCount( static_cast( m_commandBuffer ), + scissors.size(), + reinterpret_cast( scissors.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + ArrayProxy const & strides ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), + 
firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), + static_cast( depthTestEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), + static_cast( depthWriteEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), + static_cast( depthCompareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), + static_cast( depthBoundsTestEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), + static_cast( stencilTestEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetStencilOp( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), + static_cast( rasterizerDiscardEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), + static_cast( depthBiasEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), + static_cast( primitiveRestartEnable ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirements( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + 
StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirements( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirements( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirements( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + VULKAN_HPP_NOEXCEPT + { + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + std::vector sparseMemoryRequirements( + sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + //=== VK_KHR_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Bool32 supported; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( static_cast( m_physicalDevice ), + queueFamilyIndex, + static_cast( surface ), + reinterpret_cast( &supported ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + } + return supported; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + } + return surfaceCapabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && + "Function needs extension enabled!" ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( + static_cast( m_physicalDevice ), + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + } + return surfaceFormats; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR && + "Function needs extension enabled!" 
); + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &presentModeCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( + static_cast( m_physicalDevice ), + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + } + return presentModes; + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR Device::createSwapchainKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getImages() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && + "Function needs extension enabled!" ); + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetSwapchainImagesKHR( static_cast( m_device ), + static_cast( m_swapchain ), + &swapchainImageCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( + getDispatcher()->vkGetSwapchainImagesKHR( static_cast( m_device ), + static_cast( m_swapchain ), + &swapchainImageCount, + swapchainImages.data() ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + } + return swapchainImages; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + SwapchainKHR::acquireNextImage( uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && + "Function needs extension enabled!" 
); + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireNextImageKHR( static_cast( m_device ), + static_cast( m_swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage" ); + } + return std::make_pair( result, imageIndex ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkQueuePresentKHR( + static_cast( m_queue ), reinterpret_cast( &presentInfo ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR" ); + } + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR + Device::getGroupPresentCapabilitiesKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast( m_device ), + reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + } + return deviceGroupPresentCapabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( + static_cast( m_device ), + static_cast( surface ), + reinterpret_cast( &modes ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + } + return modes; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR && + "Function needs extension enabled!" 
); + + std::vector rects; + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &rectCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + } + return rects; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && + "Function needs extension enabled!" ); + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireNextImage2KHR( static_cast( m_device ), + reinterpret_cast( &acquireInfo ), + &imageIndex ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eNotReady ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR" ); + } + return std::make_pair( result, imageIndex ); + } + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getDisplayPropertiesKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR && + "Function needs extension enabled!" ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getDisplayPlanePropertiesKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR && + "Function needs extension enabled!" 
); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs( *this, planeIndex ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + DisplayKHR::getModeProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && + "Function needs extension enabled!" ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetDisplayModePropertiesKHR( + static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR DisplayKHR::createMode( + VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR + DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( + static_cast( m_physicalDevice ), + static_cast( m_displayModeKHR ), + planeIndex, + reinterpret_cast( &capabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" ); + } + return capabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createDisplayPlaneSurfaceKHR( + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + //=== VK_KHR_display_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::createSharedSwapchainsKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs( *this, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR Device::createSharedSwapchainKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator ); + } + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createXlibSurfaceKHR( + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXlibPresentationSupportKHR( + uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR && + "Function needs extension enabled!" ); + + return static_cast( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &dpy, visualID ) ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createXcbSurfaceKHR( + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( + uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR && + "Function needs extension enabled!" 
); + + return static_cast( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &connection, visual_id ) ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createWaylandSurfaceKHR( + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR && + "Function needs extension enabled!" ); + + return static_cast( + getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &display ) ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createAndroidSurfaceKHR( + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createWin32SurfaceKHR( + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR && + "Function needs extension enabled!" ); + + return static_cast( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex ) ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT + Instance::createDebugReportCallbackEXT( + VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkDebugReportMessageEXT( static_cast( m_instance ), + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } + + //=== VK_EXT_debug_marker === + + VULKAN_HPP_INLINE void + Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkDebugMarkerSetObjectTagEXT( + static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + } + } + + VULKAN_HPP_INLINE void + Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkDebugMarkerSetObjectNameEXT( + static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( + const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + } + return capabilities; + } + + template + VULKAN_HPP_NODISCARD StructureChain + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR & videoProfile ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + } + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getVideoFormatPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function needs extension enabled!" ); + + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + } + return videoFormatProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR Device::createVideoSessionKHR( + VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + VideoSessionKHR::getMemoryRequirements() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR && + "Function needs extension enabled!" 
); + + std::vector videoSessionMemoryRequirements; + uint32_t videoSessionMemoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), + static_cast( m_videoSession ), + &videoSessionMemoryRequirementsCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoSessionMemoryRequirementsCount ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + result = static_cast( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( + static_cast( m_device ), + static_cast( m_videoSession ), + &videoSessionMemoryRequirementsCount, + reinterpret_cast( videoSessionMemoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::getMemoryRequirements" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() ); + if ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) + { + videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount ); + } + } + return videoSessionMemoryRequirements; + } + + VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory( + ArrayProxy const & videoSessionBindMemories ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindVideoSessionMemoryKHR( + static_cast( m_device ), + static_cast( m_videoSession ), + videoSessionBindMemories.size(), + reinterpret_cast( videoSessionBindMemories.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR + Device::createVideoSessionParametersKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkUpdateVideoSessionParametersKHR( + static_cast( m_device ), + static_cast( m_videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( + const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &beginInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( + const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &endCodingInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( + const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdControlVideoCodingKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &codingControlInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_decode_queue === + + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( + const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &frameInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_transform_feedback === + + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT && + "Function needs extension enabled!" ); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( + uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBeginTransformFeedbackEXT && + "Function needs extension enabled!" 
); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBeginTransformFeedbackEXT( + static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( + uint32_t firstCounterBuffer, + ArrayProxy const & counterBuffers, + ArrayProxy const & counterBufferOffsets ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdEndTransformFeedbackEXT && + "Function needs extension enabled!" ); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdEndTransformFeedbackEXT( + static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBeginQueryIndexedEXT( static_cast( m_commandBuffer ), + static_cast( queryPool ), + query, + static_cast( flags ), + index ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdEndQueryIndexedEXT( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndirectByteCountEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); + } + + //=== VK_NVX_binary_import === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX Device::createCuModuleNVX( + VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX Device::createCuFunctionNVX( + VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( + const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), + reinterpret_cast( &launchInfo ) ); + } + + //=== VK_NVX_image_view_handle === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( + const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && + "Function needs extension enabled!" ); + + return getDispatcher()->vkGetImageViewHandleNVX( static_cast( m_device ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX + ImageView::getAddressNVX() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetImageViewAddressNVX( + static_cast( m_device ), + static_cast( m_imageView ), + reinterpret_cast( &properties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" ); + } + return properties; + } + + //=== VK_AMD_draw_indirect_count === + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndexedIndirectCountAMD && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_AMD_shader_info === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && + "Function needs extension enabled!" ); + + std::vector info; + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( m_pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( + getDispatcher()->vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( m_pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + } + return info; + } + + //=== VK_KHR_dynamic_rendering === + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( + const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &renderingInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); + } + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + Instance::createStreamDescriptorSurfaceGGP( + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV + PhysicalDevice::getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } + return externalImageFormatProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && + "Function needs extension enabled!" ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryWin32HandleNV( static_cast( m_device ), + static_cast( m_memory ), + static_cast( handleType ), + &handle ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" ); + } + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceProperties2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceProperties2KHR && + "Function needs extension enabled!" 
); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), + static_cast( format ), + reinterpret_cast( &formatProperties ) ); + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), + static_cast( format ), + reinterpret_cast( &formatProperties ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } + return imageFormatProperties; + } + + template + VULKAN_HPP_NODISCARD StructureChain PhysicalDevice::getImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + } + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function needs extension enabled!" 
); + + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount == queueFamilyProperties.size() ); + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function needs extension enabled!" ); + + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + std::vector returnVector( queueFamilyPropertyCount ); + std::vector queueFamilyProperties( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = + returnVector[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + returnVector[i].template get() = queueFamilyProperties[i]; + } + return returnVector; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function needs extension enabled!" 
); + + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + std::vector properties( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + VULKAN_HPP_ASSERT( propertyCount == properties.size() ); + return properties; + } + + //=== VK_KHR_device_group === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( + static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } + + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDispatchBaseKHR( static_cast( m_commandBuffer ), + baseGroupX, + baseGroupY, + baseGroupZ, + groupCountX, + groupCountY, + groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createViSurfaceNN( + VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + VULKAN_HPP_INLINE void + CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkTrimCommandPoolKHR( static_cast( m_device ), + static_cast( m_commandPool ), + static_cast( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Instance::enumeratePhysicalDeviceGroupsKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR && + "Function needs extension enabled!" 
); + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( + static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( + static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + } + return physicalDeviceGroupProperties; + } + + //=== VK_KHR_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getMemoryWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryWin32HandleKHR && + "Function needs extension enabled!" ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryWin32HandleKHR( + static_cast( m_device ), + reinterpret_cast( &getWin32HandleInfo ), + &handle ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + } + return handle; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( + static_cast( m_device ), + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + } + return memoryWin32HandleProperties; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int + Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && + "Function needs extension enabled!" ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryFdKHR( + static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + } + return fd; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryFdPropertiesKHR( + static_cast( m_device ), + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + } + return memoryFdProperties; + } + + //=== VK_KHR_external_semaphore_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VULKAN_HPP_INLINE void Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkImportSemaphoreWin32HandleKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkImportSemaphoreWin32HandleKHR( + static_cast( m_device ), + reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetSemaphoreWin32HandleKHR && + "Function needs extension enabled!" ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetSemaphoreWin32HandleKHR( + static_cast( m_device ), + reinterpret_cast( &getWin32HandleInfo ), + &handle ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + } + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VULKAN_HPP_INLINE void + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkImportSemaphoreFdKHR( + static_cast( m_device ), + reinterpret_cast( &importSemaphoreFdInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int + Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && + "Function needs extension enabled!" ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetSemaphoreFdKHR( + static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + } + return fd; + } + + //=== VK_KHR_push_descriptor === + + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdPushDescriptorSetKHR( + static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( + static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } + + //=== VK_EXT_conditional_rendering === + + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBeginConditionalRenderingEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBeginConditionalRenderingEXT( + static_cast( m_commandBuffer ), + reinterpret_cast( &conditionalRenderingBegin ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdEndConditionalRenderingEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); + } + + //=== VK_KHR_descriptor_update_template === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate + Device::createDescriptorUpdateTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR( + static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( + static_cast( allocator ) ) ); + } + + template + VULKAN_HPP_INLINE void + DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( + static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } + + //=== VK_NV_clip_space_w_scaling === + + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( + uint32_t firstViewport, + ArrayProxy const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetViewportWScalingNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetViewportWScalingNV( + static_cast( m_commandBuffer ), + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkAcquireXlibDisplayEXT( + static_cast( m_physicalDevice ), &dpy, static_cast( display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, dpy, rrOutput ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( + static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + } + return surfaceCapabilities; + } + + //=== VK_EXT_display_control === + + VULKAN_HPP_INLINE void + Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkDisplayPowerControlEXT( + static_cast( m_device ), + static_cast( display ), + reinterpret_cast( &displayPowerInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::registerEventEXT( + VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, deviceEventInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence Device::registerDisplayEventEXT( + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, display, displayEventInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && + "Function needs extension enabled!" 
); + + uint64_t counterValue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSwapchainCounterEXT( static_cast( m_device ), + static_cast( m_swapchain ), + static_cast( counter ), + &counterValue ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" ); + } + return counterValue; + } + + //=== VK_GOOGLE_display_timing === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE + SwapchainKHR::getRefreshCycleDurationGOOGLE() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetRefreshCycleDurationGOOGLE && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRefreshCycleDurationGOOGLE( + static_cast( m_device ), + static_cast( m_swapchain ), + reinterpret_cast( &displayTimingProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" ); + } + return displayTimingProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + SwapchainKHR::getPastPresentationTimingGOOGLE() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPastPresentationTimingGOOGLE && + "Function needs extension enabled!" ); + + std::vector presentationTimings; + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), + static_cast( m_swapchain ), + &presentationTimingCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( getDispatcher()->vkGetPastPresentationTimingGOOGLE( + static_cast( m_device ), + static_cast( m_swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + } + return presentationTimings; + } + + //=== VK_EXT_discard_rectangles === + + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( + uint32_t firstDiscardRectangle, + ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetDiscardRectangleEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast( m_commandBuffer ), + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); + } + + //=== VK_EXT_hdr_metadata === + + VULKAN_HPP_INLINE void + Device::setHdrMetadataEXT( ArrayProxy const & swapchains, + ArrayProxy const & metadata ) const + VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && + "Function needs extension enabled!" 
);
+
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
+# else
+      if ( swapchains.size() != metadata.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+      getDispatcher()->vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ),
+                                            swapchains.size(),
+                                            reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
+                                            reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
+    }
+
+    //=== VK_KHR_create_renderpass2 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2KHR(
+      VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const &                             createInfo,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR(
+      const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+      const VULKAN_HPP_NAMESPACE::SubpassBeginInfo &    subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR &&
+                         "Function needs <VK_KHR_create_renderpass2> extension enabled!" );
+
+      getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                 reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
+                                                 reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR(
+      const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+      const VULKAN_HPP_NAMESPACE::SubpassEndInfo &   subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR &&
+                         "Function needs <VK_KHR_create_renderpass2> extension enabled!" );
+
+      getDispatcher()->vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                             reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
+                                             reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR(
+      const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR &&
+                         "Function needs <VK_KHR_create_renderpass2> extension enabled!" );
+
+      getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                               reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    }
+
+    //=== VK_KHR_shared_presentable_image ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetSwapchainStatusKHR &&
+        "Function needs <VK_KHR_shared_presentable_image> extension enabled!" );
+
+      VULKAN_HPP_NAMESPACE::Result result =
+        static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetSwapchainStatusKHR(
+          static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) ) );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
+           ( result != VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus" );
+      }
+      return result;
+    }
+
+    //=== VK_KHR_external_fence_capabilities ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+      PhysicalDevice::getExternalFencePropertiesKHR(
+        const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR &&
+        "Function needs <VK_KHR_external_fence_capabilities> extension enabled!"
); + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkImportFenceWin32HandleKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkImportFenceWin32HandleKHR( + static_cast( m_device ), + reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getFenceWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && + "Function needs extension enabled!" ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetFenceWin32HandleKHR( + static_cast( m_device ), + reinterpret_cast( &getWin32HandleInfo ), + &handle ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + } + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + VULKAN_HPP_INLINE void + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkImportFenceFdKHR( + static_cast( m_device ), reinterpret_cast( &importFenceFdInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int + Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && + "Function needs extension enabled!" ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetFenceFdKHR( + static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + } + return fd; + } + + //=== VK_KHR_performance_query === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::pair, std::vector> + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function needs extension enabled!" 
); + + std::pair, std::vector> data; + std::vector & counters = data.first; + std::vector & counterDescriptions = data.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + static_cast( m_physicalDevice ), + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( counterCount < counters.size() ) ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( + result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + } + return data; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && + "Function needs extension enabled!" ); + + uint32_t numPasses; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); + return numPasses; + } + + VULKAN_HPP_INLINE void + Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkAcquireProfilingLockKHR( + static_cast( m_device ), reinterpret_cast( &info ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + } + } + + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkReleaseProfilingLockKHR( static_cast( m_device ) ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR + PhysicalDevice::getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + return surfaceCapabilities; + } + + template + VULKAN_HPP_NODISCARD StructureChain PhysicalDevice::getSurfaceCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormats2KHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function needs extension enabled!" ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + } + return surfaceFormats; + } + + //=== VK_KHR_get_display_properties2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getDisplayProperties2KHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR && + "Function needs extension enabled!" 
); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getDisplayPlaneProperties2KHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function needs extension enabled!" ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + DisplayKHR::getModeProperties2() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDisplayModeProperties2KHR && + "Function needs extension enabled!" 
); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetDisplayModeProperties2KHR( + static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR + PhysicalDevice::getDisplayPlaneCapabilities2KHR( + const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDisplayPlaneCapabilities2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + } + return capabilities; + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createIOSSurfaceMVK( + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMacOSSurfaceMVK( + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_INLINE void + Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && + "Function needs extension enabled!" 
);
+
+      VULKAN_HPP_NAMESPACE::Result result =
+        static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetDebugUtilsObjectNameEXT(
+          static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+      }
+    }
+
+    VULKAN_HPP_INLINE void
+      Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!" );
+
+      VULKAN_HPP_NAMESPACE::Result result =
+        static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkSetDebugUtilsObjectTagEXT(
+          static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+        throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+      }
+    }
+
+    VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT(
+      const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!" );
+
+      getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ),
+                                                       reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!" );
+
+      getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
+    }
+
+    VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT(
+      const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!" );
+
+      getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ),
+                                                        reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT(
+      const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!" );
+
+      getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                     reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!" );
+
+      getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT(
+      const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT &&
+                         "Function needs <VK_EXT_debug_utils> extension enabled!"
); + + getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &labelInfo ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT + Instance::createDebugUtilsMessengerEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkSubmitDebugUtilsMessageEXT( + static_cast( m_instance ), + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), + &buffer, + reinterpret_cast( &properties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } + return properties; + } + + template + VULKAN_HPP_NODISCARD StructureChain + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), + &buffer, + reinterpret_cast( &properties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer * Device::getMemoryAndroidHardwareBufferANDROID( + const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID && + "Function needs extension enabled!" 
); + + struct AHardwareBuffer * buffer; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( + static_cast( m_device ), + reinterpret_cast( &info ), + &buffer ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + } + return buffer; + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sample_locations === + + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( + const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetSampleLocationsEXT( + static_cast( m_commandBuffer ), + reinterpret_cast( &sampleLocationsInfo ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( + static_cast( m_physicalDevice ), + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); + return multisampleProperties; + } + + //=== VK_KHR_get_memory_requirements2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageMemoryRequirements2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements2KHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageMemoryRequirements2KHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2KHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferMemoryRequirements2KHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2KHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferMemoryRequirements2KHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2KHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( + const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR && + "Function needs extension enabled!" ); + + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + std::vector sparseMemoryRequirements( + sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR + Device::createAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos ) + const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBuildAccelerationStructuresKHR && + "Function needs extension enabled!" ); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBuildAccelerationStructuresKHR( + static_cast( m_commandBuffer ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + ArrayProxy const & infos, + ArrayProxy const & indirectDeviceAddresses, + ArrayProxy const & indirectStrides, + ArrayProxy const & pMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR && + "Function needs extension enabled!" 
); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( + static_cast( m_commandBuffer ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + ArrayProxy const & infos, + ArrayProxy const & pBuildRangeInfos ) + const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkBuildAccelerationStructuresKHR && + "Function needs extension enabled!" ); + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBuildAccelerationStructuresKHR( + static_cast( m_device ), + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR" ); + } + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCopyAccelerationStructureKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCopyAccelerationStructureKHR( + static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR" ); + } + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureToMemoryKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCopyAccelerationStructureToMemoryKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( + static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR" ); + } + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCopyMemoryToAccelerationStructureKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( + static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR" ); + } + return result; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( + static_cast( m_device ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD DataType Device::writeAccelerationStructuresPropertyKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const + { + DataType data; + Result result = static_cast( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( + static_cast( m_device ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + } + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdCopyAccelerationStructureKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdCopyAccelerationStructureKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getAccelerationStructureAddressKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR && + "Function needs extension enabled!" 
); + + return static_cast( + getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR( + static_cast( m_device ), + reinterpret_cast( &info ) ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( + static_cast( m_commandBuffer ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( + static_cast( m_device ), + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + return compatibility; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy const & maxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetAccelerationStructureBuildSizesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( + static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + return sizeInfo; + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion + Device::createSamplerYcbcrConversionKHR( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkDestroySamplerYcbcrConversionKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkDestroySamplerYcbcrConversionKHR( + static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( + static_cast( allocator ) ) ); + } + + //=== VK_KHR_bind_memory2 === + + VULKAN_HPP_INLINE void Device::bindBufferMemory2KHR( + ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindBufferMemory2KHR( + static_cast( m_device ), + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + } + } + + VULKAN_HPP_INLINE void + Device::bindImageMemory2KHR( ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindImageMemory2KHR( static_cast( m_device ), + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + } + } + + //=== VK_EXT_image_drm_format_modifier === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT + Image::getDrmFormatModifierPropertiesEXT() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( + static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &properties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" ); + } + return properties; + } + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT + Device::createValidationCacheEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void + ValidationCacheEXT::merge( ArrayProxy const & srcCaches ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkMergeValidationCachesEXT( + static_cast( m_device ), + static_cast( m_validationCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ValidationCacheEXT::getData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && + "Function needs extension enabled!" 
); + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetValidationCacheDataEXT( static_cast( m_device ), + static_cast( m_validationCache ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + getDispatcher()->vkGetValidationCacheDataEXT( static_cast( m_device ), + static_cast( m_validationCache ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + } + return data; + } + + //=== VK_NV_shading_rate_image === + + VULKAN_HPP_INLINE void + CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBindShadingRateImageNV( static_cast( m_commandBuffer ), + static_cast( imageView ), + static_cast( imageLayout ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, ArrayProxy const & shadingRatePalettes ) + const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetViewportShadingRatePaletteNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( + static_cast( m_commandBuffer ), + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy const & customSampleOrders ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetCoarseSampleOrderNV( + static_cast( m_commandBuffer ), + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } + + //=== VK_NV_ray_tracing === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV + Device::createAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV( + ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkBindAccelerationStructureMemoryNV && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindAccelerationStructureMemoryNV( + static_cast( m_device ), + bindInfos.size(), + reinterpret_cast( bindInfos.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBuildAccelerationStructureNV( + static_cast( m_commandBuffer ), + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast( m_commandBuffer ), + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdTraceRaysNV( static_cast( m_commandBuffer ), + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineNV( + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && + "Function needs extension enabled!" 
); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( + getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, + uint32_t groupCount ) const + { + DataType data; + Result result = static_cast( + getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + AccelerationStructureNV::getHandle( size_t dataSize ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && + "Function needs extension enabled!" ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( getDispatcher()->vkGetAccelerationStructureHandleNV( + static_cast( m_device ), + static_cast( m_accelerationStructure ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD DataType AccelerationStructureNV::getHandle() const + { + DataType data; + Result result = static_cast( getDispatcher()->vkGetAccelerationStructureHandleNV( + static_cast( m_device ), + static_cast( m_accelerationStructure ), + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + } + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( + static_cast( m_commandBuffer ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + + VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCompileDeferredNV( + static_cast( m_device ), static_cast( m_pipeline ), shader ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" ); + } + } + + //=== VK_KHR_maintenance3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupportKHR( + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = + structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + return structureChain; + } + + //=== VK_KHR_draw_indirect_count === + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndexedIndirectCountKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_EXT_external_memory_host === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryHostPointerPropertiesEXT && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( + static_cast( m_device ), + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + } + return memoryHostPointerProperties; + } + + //=== VK_AMD_buffer_marker === + + VULKAN_HPP_INLINE void + CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast( m_commandBuffer ), + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCalibrateableTimeDomainsEXT() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function needs extension enabled!" ); + + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast( m_physicalDevice ), &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast( m_physicalDevice ), + &timeDomainCount, + reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + } + return timeDomains; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, uint64_t> + Device::getCalibratedTimestampsEXT( + ArrayProxy const & timestampInfos ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetCalibratedTimestampsEXT && + "Function needs extension enabled!" 
); + + std::pair, uint64_t> data( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data.first; + uint64_t & maxDeviation = data.second; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetCalibratedTimestampsEXT( + static_cast( m_device ), + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + } + return data; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetCalibratedTimestampsEXT && + "Function needs extension enabled!" ); + + std::pair data; + uint64_t & timestamp = data.first; + uint64_t & maxDeviation = data.second; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetCalibratedTimestampsEXT( + static_cast( m_device ), + 1, + reinterpret_cast( ×tampInfo ), + ×tamp, + &maxDeviation ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + } + return data; + } + + //=== VK_NV_mesh_shader === + + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, + uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawMeshTasksNV( static_cast( m_commandBuffer ), taskCount, firstTask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectNV( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + drawCount, + stride ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_NV_scissor_exclusive === + + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( + uint32_t firstExclusiveScissor, + ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissors.size(), + reinterpret_cast( exclusiveScissors.data() ) ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + template + VULKAN_HPP_INLINE void + CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetCheckpointNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetCheckpointNV( static_cast( m_commandBuffer ), + reinterpret_cast( &checkpointMarker ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetQueueCheckpointDataNV && + "Function needs extension enabled!" ); + + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointDataNV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + std::vector checkpointData( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointDataNV( static_cast( m_queue ), + &checkpointDataCount, + reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount == checkpointData.size() ); + return checkpointData; + } + + //=== VK_KHR_timeline_semaphore === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetSemaphoreCounterValueKHR && + "Function needs extension enabled!" ); + + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetSemaphoreCounterValueKHR( + static_cast( m_device ), static_cast( m_semaphore ), &value ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" ); + } + return value; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWaitSemaphoresKHR( + static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR" ); + } + return result; + } + + VULKAN_HPP_INLINE void + Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkSignalSemaphoreKHR( + static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + } + } + + //=== VK_INTEL_performance_query === + + VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkInitializePerformanceApiINTEL && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkInitializePerformanceApiINTEL( + static_cast( m_device ), + reinterpret_cast( &initializeInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + } + } + + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkUninitializePerformanceApiINTEL && + "Function needs extension enabled!" ); + + getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast( m_device ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPerformanceMarkerINTEL && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCmdSetPerformanceMarkerINTEL( + static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( + static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPerformanceOverrideINTEL && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCmdSetPerformanceOverrideINTEL( + static_cast( m_commandBuffer ), + reinterpret_cast( &overrideInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL + Device::acquirePerformanceConfigurationINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( *this, acquireInfo ); + } + + VULKAN_HPP_INLINE void + Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkQueueSetPerformanceConfigurationINTEL && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( + static_cast( m_queue ), static_cast( configuration ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPerformanceParameterINTEL && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPerformanceParameterINTEL( static_cast( m_device ), + static_cast( parameter ), + reinterpret_cast( &value ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + } + return value; + } + + //=== VK_AMD_display_native_hdr === + + VULKAN_HPP_INLINE void + SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && + "Function needs extension enabled!" ); + + getDispatcher()->vkSetLocalDimmingAMD( static_cast( m_device ), + static_cast( m_swapchain ), + static_cast( localDimmingEnable ) ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + Instance::createImagePipeSurfaceFUCHSIA( + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMetalSurfaceEXT( + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getFragmentShadingRatesKHR() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function needs extension enabled!" 
); + + std::vector fragmentShadingRates; + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( + static_cast( m_physicalDevice ), &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( + static_cast( m_physicalDevice ), + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + } + return fragmentShadingRates; + } + + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( + const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetFragmentShadingRateKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetFragmentShadingRateKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &fragmentSize ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_EXT_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferDeviceAddressEXT && + "Function needs extension enabled!" ); + + return static_cast( getDispatcher()->vkGetBufferDeviceAddressEXT( + static_cast( m_device ), reinterpret_cast( &info ) ) ); + } + + //=== VK_EXT_tooling_info === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getToolPropertiesEXT() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT && + "Function needs extension enabled!" 
); + + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( + static_cast( m_physicalDevice ), &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( + static_cast( m_physicalDevice ), + &toolCount, + reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + } + return toolProperties; + } + + //=== VK_KHR_present_wait === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWaitForPresentKHR( + static_cast( m_device ), static_cast( m_swapchain ), presentId, timeout ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eTimeout ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent" ); + } + return result; + } + + //=== VK_NV_cooperative_matrix === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixPropertiesNV() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function needs extension enabled!" ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + } + return properties; + } + + //=== VK_NV_coverage_reduction_mode === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function needs extension enabled!" 
); + + std::vector combinations; + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( + result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + } + return combinations; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function needs extension enabled!" ); + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( + static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = + static_cast( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( + static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + } + return presentModes; + } + + VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkAcquireFullScreenExclusiveModeEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( + static_cast( m_device ), static_cast( m_swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" ); + } + } + + VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkReleaseFullScreenExclusiveModeEXT && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( + static_cast( m_device ), static_cast( m_swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModes2EXT( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( + static_cast( m_device ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &modes ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + } + return modes; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createHeadlessSurfaceEXT( + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + //=== VK_KHR_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferDeviceAddressKHR && + "Function needs extension enabled!" ); + + return static_cast( getDispatcher()->vkGetBufferDeviceAddressKHR( + static_cast( m_device ), reinterpret_cast( &info ) ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR && + "Function needs extension enabled!" ); + + return getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( + static_cast( m_device ), reinterpret_cast( &info ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( + const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR && + "Function needs extension enabled!" ); + + return getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( + static_cast( m_device ), reinterpret_cast( &info ) ); + } + + //=== VK_EXT_line_rasterization === + + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdSetLineStippleEXT( + static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkResetQueryPoolEXT( + static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + VULKAN_HPP_INLINE void + CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), + static_cast( cullMode ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), + static_cast( frontFace ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( + VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPrimitiveTopologyEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), + static_cast( primitiveTopology ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( + ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetViewportWithCountEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetViewportWithCountEXT( static_cast( m_commandBuffer ), + viewports.size(), + reinterpret_cast( viewports.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( + ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetScissorWithCountEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetScissorWithCountEXT( static_cast( m_commandBuffer ), + scissors.size(), + reinterpret_cast( scissors.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( + uint32_t firstBinding, + ArrayProxy const & buffers, + ArrayProxy const & offsets, + ArrayProxy const & sizes, + ArrayProxy const & strides ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBindVertexBuffers2EXT && + "Function needs extension enabled!" 
); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetDepthTestEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), + static_cast( depthTestEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetDepthWriteEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), + static_cast( depthWriteEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetDepthCompareOpEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), + static_cast( depthCompareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), + static_cast( depthBoundsTestEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetStencilTestEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), + static_cast( stencilTestEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + //=== VK_KHR_deferred_host_operations === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR + Device::createDeferredOperationKHR( + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( *this, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR && + "Function needs extension enabled!" ); + + return getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( + static_cast( m_device ), static_cast( m_operation ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeferredOperationResultKHR && + "Function needs extension enabled!" ); + + return static_cast( getDispatcher()->vkGetDeferredOperationResultKHR( + static_cast( m_device ), static_cast( m_operation ) ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkDeferredOperationJoinKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkDeferredOperationJoinKHR( + static_cast( m_device ), static_cast( m_operation ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR ) ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join" ); + } + return result; + } + + //=== VK_KHR_pipeline_executable_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPipelineExecutablePropertiesKHR && + "Function needs extension enabled!" 
); + + std::vector properties; + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineExecutablePropertiesKHR( + static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + &executableCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( getDispatcher()->vkGetPipelineExecutablePropertiesKHR( + static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutableStatisticsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPipelineExecutableStatisticsKHR && + "Function needs extension enabled!" ); + + std::vector statistics; + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineExecutableStatisticsKHR( + static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &statisticCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( getDispatcher()->vkGetPipelineExecutableStatisticsKHR( + static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + } + return statistics; + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR && + "Function needs extension enabled!" 
); + + std::vector internalRepresentations; + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( + static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( + getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( + static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + } + return internalRepresentations; + } + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getGeneratedCommandsMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPreprocessGeneratedCommandsNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( + static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( + VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdExecuteGeneratedCommandsNV && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdExecuteGeneratedCommandsNV( + static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBindPipelineShaderGroupNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV + Device::createIndirectCommandsLayoutNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( *this, createInfo, allocator ); + } + + //=== VK_EXT_acquire_drm_display === + + VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkAcquireDrmDisplayEXT( + static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, drmFd, connectorId ); + } + + //=== VK_EXT_private_data === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot Device::createPrivateDataSlotEXT( + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkDestroyPrivateDataSlotEXT( + static_cast( m_device ), + static_cast( privateDataSlot ), + reinterpret_cast( + static_cast( allocator ) ) ); + } + + VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetPrivateDataEXT( static_cast( m_device ), + static_cast( objectType_ ), + objectHandle, + static_cast( privateDataSlot ), + data ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && + "Function needs extension enabled!" ); + + uint64_t data; + getDispatcher()->vkGetPrivateDataEXT( static_cast( m_device ), + static_cast( objectType_ ), + objectHandle, + static_cast( privateDataSlot ), + &data ); + return data; + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( + const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &encodeInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( + VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetEvent2KHR( static_cast( m_commandBuffer ), + static_cast( event ), + reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdResetEvent2KHR( static_cast( m_commandBuffer ), + static_cast( event ), + static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2KHR( ArrayProxy const & events, + ArrayProxy const & dependencyInfos ) + const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && + "Function needs extension enabled!" ); + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdWaitEvents2KHR( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdWriteTimestamp2KHR( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( queryPool ), + query ); + } + + VULKAN_HPP_INLINE void Queue::submit2KHR( ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueueSubmit2KHR( static_cast( m_queue ), + submits.size(), + reinterpret_cast( submits.data() ), + static_cast( fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + } + } + + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && + "Function needs extension enabled!" ); + + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + std::vector checkpointData( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), + &checkpointDataCount, + reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( checkpointDataCount == checkpointData.size() ); + return checkpointData; + } + + //=== VK_NV_fragment_shading_rate_enums === + + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetFragmentShadingRateEnumNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( + static_cast( m_commandBuffer ), + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_KHR_copy_commands2 === + + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( + const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && + "Function needs extension enabled!" 
+
+    getDispatcher()->vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                          reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR(
+    const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR &&
+                       "Function needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
+    const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR &&
+                       "Function needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    getDispatcher()->vkCmdCopyBufferToImage2KHR(
+      static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
+    const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR &&
+                       "Function needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    getDispatcher()->vkCmdCopyImageToBuffer2KHR(
+      static_cast<VkCommandBuffer>( m_commandBuffer ),
+      reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR(
+    const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR &&
+                       "Function needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR(
+    const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR &&
+                       "Function needs extension <VK_KHR_copy_commands2> enabled!" );
+
+    getDispatcher()->vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                            reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+  }
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+  VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV &&
+                       "Function needs extension <VK_NV_acquire_winrt_display> enabled!" );
+
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkAcquireWinrtDisplayNV(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) ) );
+    if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
+    }
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR
+    PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, deviceRelativeId );
+  }
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createDirectFBSurfaceEXT(
+    VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const &                       createInfo,
+    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+    PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t    queueFamilyIndex,
+                                                       IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT(
+      getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
+      "Function needs extension <VK_EXT_directfb_surface> enabled!" );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>(
+      getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb ) );
+  }
+# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR(
+    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+    uint32_t width,
+    uint32_t height,
+    uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR &&
+                       "Function needs extension <VK_KHR_ray_tracing_pipeline> enabled!"
); + + getDispatcher()->vkCmdTraceRaysKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional< + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, deferredOperation, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional< + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & + pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, deferredOperation, pipelineCache, createInfo, allocator ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( + getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, + uint32_t groupCount ) const + { + DataType data; + Result result = static_cast( + getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function needs extension enabled!" 
); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + Result result = static_cast( + getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( + result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + } + return data; + } + + template + VULKAN_HPP_NODISCARD DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, + uint32_t groupCount ) const + { + DataType data; + Result result = static_cast( + getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( + result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + } + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdTraceRaysIndirectKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdTraceRaysIndirectKHR( + static_cast( m_commandBuffer ), + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Pipeline::getRayTracingShaderGroupStackSizeKHR( + uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR && + "Function needs extension enabled!" ); + + return static_cast( + getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( static_cast( m_device ), + static_cast( m_pipeline ), + group, + static_cast( groupShader ) ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast( m_commandBuffer ), + pipelineStackSize ); + } + + //=== VK_EXT_vertex_input_dynamic_state === + + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + ArrayProxy const & vertexBindingDescriptions, + ArrayProxy const & vertexAttributeDescriptions ) + const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetVertexInputEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdSetVertexInputEXT( + static_cast( m_commandBuffer ), + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t Device::getMemoryZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && + "Function needs extension enabled!" ); + + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), + reinterpret_cast( &getZirconHandleInfo ), + &zirconHandle ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + } + return zirconHandle; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( + static_cast( m_device ), + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + } + return memoryZirconHandleProperties; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VULKAN_HPP_INLINE void Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), + reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t Device::getSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA && + "Function needs extension enabled!" 
); + + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), + reinterpret_cast( &getZirconHandleInfo ), + &zirconHandle ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + } + return zirconHandle; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA + Device::createBufferCollectionFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( + static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &imageConstraintsInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); + } + } + + VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setBufferConstraints( + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( + static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &bufferConstraintsInfo ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); + } + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA + BufferCollectionFUCHSIA::getProperties() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( + static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &properties ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); + } + return properties; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function needs extension enabled!" 
); + + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast( m_device ), + static_cast( m_renderPass ), + reinterpret_cast( &maxWorkgroupSize ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) ) + { + throwResultException( result, + VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + } + return std::make_pair( result, maxWorkgroupSize ); + } + + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); + } + + //=== VK_HUAWEI_invocation_mask === + + VULKAN_HPP_INLINE void + CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdBindInvocationMaskHUAWEI && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdBindInvocationMaskHUAWEI( static_cast( m_commandBuffer ), + static_cast( imageView ), + static_cast( imageLayout ) ); + } + + //=== VK_NV_external_memory_rdma === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV Device::getMemoryRemoteAddressNV( + const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetMemoryRemoteAddressNV && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::RemoteAddressNV address; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetMemoryRemoteAddressNV( + static_cast( m_device ), + reinterpret_cast( &memoryGetRemoteAddressInfo ), + reinterpret_cast( &address ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + } + return address; + } + + //=== VK_EXT_extended_dynamic_state2 === + + VULKAN_HPP_INLINE void + CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPatchControlPointsEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), + patchControlPoints ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), + static_cast( rasterizerDiscardEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetDepthBiasEnableEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), + static_cast( depthBiasEnable ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), + static_cast( logicOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), + static_cast( primitiveRestartEnable ) ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createScreenSurfaceQNX( + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function needs extension enabled!" ); + + return static_cast( + getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( + static_cast( m_physicalDevice ), queueFamilyIndex, &window ) ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( + ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetColorWriteEnableEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdSetColorWriteEnableEXT( static_cast( m_commandBuffer ), + colorWriteEnables.size(), + reinterpret_cast( colorWriteEnables.data() ) ); + } + + //=== VK_EXT_multi_draw === + + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiEXT( ArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( + ArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Optional vertexOffset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdDrawMultiIndexedEXT( + static_cast( m_commandBuffer ), + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + stride, + static_cast( vertexOffset ) ); + } + + //=== VK_EXT_pageable_device_local_memory === + + VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkSetDeviceMemoryPriorityEXT && + "Function needs extension enabled!" ); + + getDispatcher()->vkSetDeviceMemoryPriorityEXT( + static_cast( m_device ), static_cast( m_memory ), priority ); + } + + //=== VK_KHR_maintenance4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function needs extension enabled!" ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function needs extension enabled!" ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = + structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( + const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function needs extension enabled!" 
); + + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + nullptr ); + std::vector sparseMemoryRequirements( + sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount == sparseMemoryRequirements.size() ); + return sparseMemoryRequirements; + } + +#endif + } // namespace VULKAN_HPP_RAII_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_screen.h b/Externals/Vulkan/Include/vulkan/vulkan_screen.h new file mode 100644 index 000000000000..f0ef40a6caff --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_screen.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_SCREEN_H_ +#define VULKAN_SCREEN_H_ 1 + +/* +** Copyright 2015-2022 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +#define VK_QNX_screen_surface 1 +#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 +#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" +typedef VkFlags VkScreenSurfaceCreateFlagsQNX; +typedef struct VkScreenSurfaceCreateInfoQNX { + VkStructureType sType; + const void* pNext; + VkScreenSurfaceCreateFlagsQNX flags; + struct _screen_context* context; + struct _screen_window* window; +} VkScreenSurfaceCreateInfoQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX( + VkInstance instance, + const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window* window); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_structs.hpp b/Externals/Vulkan/Include/vulkan/vulkan_structs.hpp new file mode 100644 index 000000000000..1f55ae3333ab --- /dev/null +++ b/Externals/Vulkan/Include/vulkan/vulkan_structs.hpp @@ -0,0 +1,102190 @@ +// Copyright 2015-2022 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#ifndef VULKAN_STRUCTS_HPP +#define VULKAN_STRUCTS_HPP + +namespace VULKAN_HPP_NAMESPACE +{ + //=============== + //=== STRUCTS === + //=============== + + struct AabbPositionsKHR + { + using NativeType = VkAabbPositionsKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {}, + float minY_ = {}, + float minZ_ = {}, + float maxX_ = {}, + float maxY_ = {}, + float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT + : minX( minX_ ) + , minY( minY_ ) + , minZ( minZ_ ) + , maxX( maxX_ ) + , maxY( maxY_ ) + , maxZ( maxZ_ ) + {} + + VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AabbPositionsKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT + { + minX = minX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT + { + minY = minY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT + { + minZ = minZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT + { + maxX = maxX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT + { + maxY = maxY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT + { + maxZ = maxZ_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minX, minY, minZ, maxX, maxY, maxZ ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AabbPositionsKHR const & ) const = default; +#else + bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && + ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ ); +# endif + } + + bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float minX = {}; + float minY = {}; + float minZ = {}; + float maxX = {}; + float maxY = {}; + float maxZ = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AabbPositionsKHR is not nothrow_move_constructible!" 
); + using AabbPositionsNV = AabbPositionsKHR; + + union DeviceOrHostAddressConstKHR + { + using NativeType = VkDeviceOrHostAddressConstKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + : deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & + setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkDeviceOrHostAddressConstKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressConstKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +#else + VkDeviceAddress deviceAddress; + const void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureGeometryTrianglesDataKHR + { + using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + uint32_t maxVertex_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexFormat( vertexFormat_ ) + , vertexData( vertexData_ ) + , vertexStride( vertexStride_ ) + , maxVertex( maxVertex_ ) + , indexType( indexType_ ) + , indexData( indexData_ ) + , transformData( transformData_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( + AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryTrianglesDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryTrianglesDataKHR & + operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR & + operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { 
+ pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT + { + maxVertex = maxVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + uint32_t maxVertex = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) == + sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryTrianglesDataKHR; + }; + + struct AccelerationStructureGeometryAabbsDataKHR + { + using NativeType = VkAccelerationStructureGeometryAabbsDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryAabbsDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT + : data( data_ ) + , stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( + AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryAabbsDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryAabbsDataKHR & + operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR & + operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & + setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, data, stride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == + sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryAabbsDataKHR; + }; + + struct AccelerationStructureGeometryInstancesDataKHR + { + using NativeType = VkAccelerationStructureGeometryInstancesDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryInstancesDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} ) VULKAN_HPP_NOEXCEPT + : arrayOfPointers( arrayOfPointers_ ) + , data( data_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( + AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryInstancesDataKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryInstancesDataKHR & + operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR & + operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + arrayOfPointers = arrayOfPointers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, arrayOfPointers, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) == + sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryInstancesDataKHR; + }; + + union AccelerationStructureGeometryDataKHR + { + using NativeType = VkAccelerationStructureGeometryDataKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + : triangles( triangles_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) + : aabbs( aabbs_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + : instances( instances_ ) + {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances( + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + { + instances = instances_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkAccelerationStructureGeometryDataKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryDataKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; +#else + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureGeometryKHR + { + using NativeType = VkAccelerationStructureGeometryKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryKHR & + operator=( AccelerationStructureGeometryKHR const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & + setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & + setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, geometryType, geometry, flags ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == + sizeof( VkAccelerationStructureGeometryKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryKHR; + }; + + union DeviceOrHostAddressKHR + { + using NativeType = VkDeviceOrHostAddressKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) + : deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkDeviceOrHostAddressKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + void * hostAddress; +#else + VkDeviceAddress deviceAddress; + void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureBuildGeometryInfoKHR + { + using NativeType = VkAccelerationStructureBuildGeometryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureBuildGeometryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + , ppGeometries( ppGeometries_ ) + , scratchData( scratchData_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( + AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildGeometryInfoKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, + 
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} ) + : type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( static_cast( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ) + , pGeometries( geometries_.data() ) + , ppGeometries( pGeometries_.data() ) + , scratchData( scratchData_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 ); +# else + if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildGeometryInfoKHR & + operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR & + operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructure = dstAccelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT + { + ppGeometries = ppGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setPGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( pGeometries_.size() ); + ppGeometries = pGeometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + type, + flags, + mode, + srcAccelerationStructure, + dstAccelerationStructure, + geometryCount, + pGeometries, + ppGeometries, + scratchData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == + sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
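// Sketch (editorial, not generated code): the build-info wrapper above is typically
// filled with the `geometry` from the previous sketch. Note that the enhanced-mode
// constructor takes either a list of geometries or a list of pointers to geometries
// (ppGeometries), never both, as the assert/throw above enforces.
vk::AccelerationStructureBuildGeometryInfoKHR buildInfo;
buildInfo.setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
         .setFlags( vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
         .setMode( vk::BuildAccelerationStructureModeKHR::eBuild )
         .setGeometries( geometry );  // sets geometryCount and pGeometries together
// dstAccelerationStructure and scratchData are filled in later, once the required
// sizes have been queried (see the build-sizes / create-info structs below).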
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureBuildGeometryInfoKHR; + }; + + struct AccelerationStructureBuildRangeInfoKHR + { + using NativeType = VkAccelerationStructureBuildRangeInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, + uint32_t primitiveOffset_ = {}, + uint32_t firstVertex_ = {}, + uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveCount( primitiveCount_ ) + , primitiveOffset( primitiveOffset_ ) + , firstVertex( firstVertex_ ) + , transformOffset( transformOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildRangeInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildRangeInfoKHR & + operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR & + operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & + setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + primitiveCount = primitiveCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & + setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + { + primitiveOffset = primitiveOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & + setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & + setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && + ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset ); +# endif + } + + bool operator!=( AccelerationStructureBuildRangeInfoKHR const & 
rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t primitiveCount = {}; + uint32_t primitiveOffset = {}; + uint32_t firstVertex = {}; + uint32_t transformOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == + sizeof( VkAccelerationStructureBuildRangeInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" ); + + struct AccelerationStructureBuildSizesInfoKHR + { + using NativeType = VkAccelerationStructureBuildSizesInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureBuildSizesInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructureSize( accelerationStructureSize_ ) + , updateScratchSize( updateScratchSize_ ) + , buildScratchSize( buildScratchSize_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildSizesInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildSizesInfoKHR & + operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildSizesInfoKHR & + operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureSize = accelerationStructureSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + updateScratchSize = updateScratchSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + buildScratchSize = buildScratchSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple 
+# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructureSize == rhs.accelerationStructureSize ) && + ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize ); +# endif + } + + bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == + sizeof( VkAccelerationStructureBuildSizesInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureBuildSizesInfoKHR; + }; + + struct AccelerationStructureCreateInfoKHR + { + using NativeType = VkAccelerationStructureCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : createFlags( createFlags_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + , type( type_ ) + , deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureCreateInfoKHR & + operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoKHR & + operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && + ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) && + ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == + sizeof( VkAccelerationStructureCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
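// Sketch (editorial): filling the create-info wrapper above. `sizeInfo` is assumed
// to be an AccelerationStructureBuildSizesInfoKHR reported by
// vkGetAccelerationStructureBuildSizesKHR, and `asBuffer` a vk::Buffer created with
// acceleration-structure-storage usage.
vk::AccelerationStructureCreateInfoKHR createInfo;
createInfo.setBuffer( asBuffer )
          .setOffset( 0 )
          .setSize( sizeInfo.accelerationStructureSize )
          .setType( vk::AccelerationStructureTypeKHR::eBottomLevel );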
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoKHR; + }; + + struct GeometryTrianglesNV + { + using NativeType = VkGeometryTrianglesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, + uint32_t vertexCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, + uint32_t indexCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexData( vertexData_ ) + , vertexOffset( vertexOffset_ ) + , vertexCount( vertexCount_ ) + , vertexStride( vertexStride_ ) + , vertexFormat( vertexFormat_ ) + , indexData( indexData_ ) + , indexOffset( indexOffset_ ) + , indexCount( indexCount_ ) + , indexType( indexType_ ) + , transformData( transformData_ ) + , transformOffset( transformOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryTrianglesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT + { + 
indexOffset = indexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & + setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + vertexData, + vertexOffset, + vertexCount, + vertexStride, + vertexFormat, + indexData, + indexOffset, + indexCount, + indexType, + transformData, + transformOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryTrianglesNV const & ) const = default; +#else + bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && + ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) && + ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) && + ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && + ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) && + ( transformOffset == rhs.transformOffset ); +# endif + } + + bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; + uint32_t vertexCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; + uint32_t indexCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::Buffer transformData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryTrianglesNV is not nothrow_move_constructible!" 
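// Sketch (editorial): the NV-extension triangle description references vk::Buffer
// objects plus byte offsets instead of device/host addresses. `vertexBuffer`,
// `indexBuffer`, `vertexCount` and `indexCount` are assumed to exist.
vk::GeometryTrianglesNV trianglesNV;
trianglesNV.setVertexData( vertexBuffer )
           .setVertexOffset( 0 )
           .setVertexCount( vertexCount )
           .setVertexStride( 3 * sizeof( float ) )
           .setVertexFormat( vk::Format::eR32G32B32Sfloat )
           .setIndexData( indexBuffer )
           .setIndexOffset( 0 )
           .setIndexCount( indexCount )
           .setIndexType( vk::IndexType::eUint32 );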
); + + template <> + struct CppType + { + using Type = GeometryTrianglesNV; + }; + + struct GeometryAABBNV + { + using NativeType = VkGeometryAABBNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, + uint32_t numAABBs_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + : aabbData( aabbData_ ) + , numAABBs( numAABBs_ ) + , stride( stride_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryAABBNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT + { + aabbData = aabbData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT + { + numAABBs = numAABBs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, aabbData, numAABBs, stride, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryAABBNV const & ) const = default; +#else + bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && + ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset ); +# endif + } + + bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; + uint32_t numAABBs = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryAABBNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = GeometryAABBNV; + }; + + struct GeometryDataNV + { + using NativeType = VkGeometryDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT + : triangles( triangles_ ) + , aabbs( aabbs_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryDataNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & + setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & + setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( triangles, aabbs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryDataNV const & ) const = default; +#else + bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs ); +# endif + } + + bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {}; + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryDataNV is not nothrow_move_constructible!" 
); + + struct GeometryNV + { + using NativeType = VkGeometryNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryNV( + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeometryNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & + setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & + setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, geometryType, geometry, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryNV const & ) const = default; +#else + bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && + ( geometry == rhs.geometry ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
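// Sketch (editorial): GeometryNV bundles triangle/AABB data through the GeometryDataNV
// pair defined above; here only the triangle member is used, reusing `trianglesNV`
// from the earlier sketch. Note that GeometryNV reuses the KHR geometry type and
// flag enums, as its members above show.
vk::GeometryNV geometryNV;
geometryNV.setGeometryType( vk::GeometryTypeKHR::eTriangles )
          .setGeometry( vk::GeometryDataNV( trianglesNV ) )
          .setFlags( vk::GeometryFlagBitsKHR::eOpaque );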
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeometryNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = GeometryNV; + }; + + struct AccelerationStructureInfoNV + { + using NativeType = VkAccelerationStructureInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + {} + + VULKAN_HPP_CONSTEXPR + AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, + uint32_t instanceCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) + : type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( static_cast( geometries_.size() ) ) + , pGeometries( geometries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & + setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & + setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & + setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV & setGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) + VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = 
geometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && + ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) && + ( pGeometries == rhs.pGeometries ); +# endif + } + + bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; + uint32_t instanceCount = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == + sizeof( VkAccelerationStructureInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureInfoNV is not nothrow_move_constructible!" 
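// Sketch (editorial): a bottom-level AccelerationStructureInfoNV built from the
// GeometryNV above. For a top-level structure the geometry list is left empty and
// instanceCount carries the number of instances instead.
vk::AccelerationStructureInfoNV asInfo;
asInfo.setType( vk::AccelerationStructureTypeNV::eBottomLevel )
      .setFlags( vk::BuildAccelerationStructureFlagBitsNV::ePreferFastTrace )
      .setGeometries( geometryNV );  // enhanced-mode helper: sets geometryCount and pGeometries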
); + + template <> + struct CppType + { + using Type = AccelerationStructureInfoNV; + }; + + struct AccelerationStructureCreateInfoNV + { + using NativeType = VkAccelerationStructureCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT + : compactedSize( compactedSize_ ) + , info( info_ ) + {} + + VULKAN_HPP_CONSTEXPR + AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureCreateInfoNV & + operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & + setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + { + compactedSize = compactedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & + setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT + { + info = info_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compactedSize, info ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && + ( info == rhs.info ); +# endif + } + + bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == + sizeof( VkAccelerationStructureCreateInfoNV ), + "struct 
and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoNV; + }; + + struct AccelerationStructureDeviceAddressInfoKHR + { + using NativeType = VkAccelerationStructureDeviceAddressInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureDeviceAddressInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( + AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureDeviceAddressInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureDeviceAddressInfoKHR & + operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR & + operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == + sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureDeviceAddressInfoKHR; + }; + + struct AccelerationStructureGeometryMotionTrianglesDataNV + { + using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexData( vertexData_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( + AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryMotionTrianglesDataNV( + VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryMotionTrianglesDataNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryMotionTrianglesDataNV & + operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryMotionTrianglesDataNV & + operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) == + sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ), + "struct and wrapper have 
different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryMotionTrianglesDataNV; + }; + + struct TransformMatrixKHR + { + using NativeType = VkTransformMatrixKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT + : matrix( matrix_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TransformMatrixKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & + setMatrix( std::array, 3> matrix_ ) VULKAN_HPP_NOEXCEPT + { + matrix = matrix_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( matrix ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TransformMatrixKHR const & ) const = default; +#else + bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( matrix == rhs.matrix ); +# endif + } + + bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D matrix = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TransformMatrixKHR is not nothrow_move_constructible!" 
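// Sketch (editorial): TransformMatrixKHR wraps a row-major 3x4 matrix (three rows of
// x/y/z/translation). An identity transform only needs the diagonal set; the other
// entries are zero-initialized by the wrapper's default member initializer. It is
// consumed by the AccelerationStructureInstanceKHR struct defined next.
vk::TransformMatrixKHR identity;
identity.matrix[0][0] = 1.0f;
identity.matrix[1][1] = 1.0f;
identity.matrix[2][2] = 1.0f;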
); + using TransformMatrixNV = TransformMatrixKHR; + + struct AccelerationStructureInstanceKHR + { + using NativeType = VkAccelerationStructureInstanceKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInstanceKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureInstanceKHR & + operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureInstanceKHR & + setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transform, + instanceCustomIndex, + mask, + instanceShaderBindingTableRecordOffset, + flags, + accelerationStructureReference ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; +#else + bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && + ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == + sizeof( VkAccelerationStructureInstanceKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" ); + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + + struct AccelerationStructureMatrixMotionInstanceNV + { + using NativeType = VkAccelerationStructureMatrixMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureMatrixMotionInstanceNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transformT0( transformT0_ ) + , transformT1( transformT1_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( + AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureMatrixMotionInstanceNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureMatrixMotionInstanceNV & + operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMatrixMotionInstanceNV & + operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT + { + transformT0 = 
transformT0_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT + { + transformT1 = transformT1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureMatrixMotionInstanceNV & + setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transformT0, + transformT1, + instanceCustomIndex, + mask, + instanceShaderBindingTableRecordOffset, + flags, + accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default; +#else + bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && + ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && + ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {}; + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) == + sizeof( VkAccelerationStructureMatrixMotionInstanceNV ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" ); + + struct AccelerationStructureMemoryRequirementsInfoNV + { + using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , accelerationStructure( accelerationStructure_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureMemoryRequirementsInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureMemoryRequirementsInfoNV & + operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV & + operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == 
rhs.type ) && + ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) == + sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureMemoryRequirementsInfoNV; + }; + + struct AccelerationStructureMotionInfoNV + { + using NativeType = VkAccelerationStructureMotionInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAccelerationStructureMotionInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( + uint32_t maxInstances_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT + : maxInstances( maxInstances_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMotionInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureMotionInfoNV & + operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & + setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT + { + maxInstances = maxInstances_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( sType, pNext, maxInstances, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && + ( flags == rhs.flags ); +# endif + } + + bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV; + const void * pNext = {}; + uint32_t maxInstances = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == + sizeof( VkAccelerationStructureMotionInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AccelerationStructureMotionInfoNV; + }; + + struct SRTDataNV + { + using NativeType = VkSRTDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SRTDataNV( float sx_ = {}, + float a_ = {}, + float b_ = {}, + float pvx_ = {}, + float sy_ = {}, + float c_ = {}, + float pvy_ = {}, + float sz_ = {}, + float pvz_ = {}, + float qx_ = {}, + float qy_ = {}, + float qz_ = {}, + float qw_ = {}, + float tx_ = {}, + float ty_ = {}, + float tz_ = {} ) VULKAN_HPP_NOEXCEPT + : sx( sx_ ) + , a( a_ ) + , b( b_ ) + , pvx( pvx_ ) + , sy( sy_ ) + , c( c_ ) + , pvy( pvy_ ) + , sz( sz_ ) + , pvz( pvz_ ) + , qx( qx_ ) + , qy( qy_ ) + , qz( qz_ ) + , qw( qw_ ) + , tx( tx_ ) + , ty( ty_ ) + , tz( tz_ ) + {} + + VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT + { + sx = sx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT + { + a = a_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT + { + b = b_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT + { + pvx = pvx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT + { + sy = sy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT + { + c = c_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT + { + pvy = pvy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT + { + sz = sz_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT + { + pvz = pvz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT + { + qx = qx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT + { + qy = qy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT + { + qz = qz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT + { + qw = qw_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT + { + tx = tx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT + { + ty = ty_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT + { + tz = tz_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SRTDataNV const & ) const = default; +#else + bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && + ( c == rhs.c ) && ( pvy == rhs.pvy ) && ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && + ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) && ( ty == rhs.ty ) && + ( tz == rhs.tz ); +# endif + } + + bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float sx = {}; + float a = {}; + float b = {}; + float pvx = {}; + float sy = {}; + float c = {}; + float pvy = {}; + float sz = {}; + float pvz = {}; + float qx = {}; + float qy = {}; + float qz = {}; + float qw = {}; + float tx = {}; + float ty = {}; + float tz = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SRTDataNV is not nothrow_move_constructible!" 
); + + struct AccelerationStructureSRTMotionInstanceNV + { + using NativeType = VkAccelerationStructureSRTMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AccelerationStructureSRTMotionInstanceNV( VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, + VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transformT0( transformT0_ ) + , transformT1( transformT1_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( + AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : AccelerationStructureSRTMotionInstanceNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureSRTMotionInstanceNV & + operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureSRTMotionInstanceNV & + operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT + { + transformT0 = transformT0_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT + { + transformT1 = transformT1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureSRTMotionInstanceNV & + setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { 
+ return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transformT0, + transformT1, + instanceCustomIndex, + mask, + instanceShaderBindingTableRecordOffset, + flags, + accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default; +#else + bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && + ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && + ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {}; + VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == + sizeof( VkAccelerationStructureSRTMotionInstanceNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" 
); + + union AccelerationStructureMotionInstanceDataNV + { + using NativeType = VkAccelerationStructureMotionInstanceDataNV; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} ) + : staticInstance( staticInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ ) + : matrixMotionInstance( matrixMotionInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ ) + : srtMotionInstance( srtMotionInstance_ ) + {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance( + VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT + { + staticInstance = staticInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setMatrixMotionInstance( + VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) + VULKAN_HPP_NOEXCEPT + { + matrixMotionInstance = matrixMotionInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance( + VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT + { + srtMotionInstance = srtMotionInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkAccelerationStructureMotionInstanceDataNV const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInstanceDataNV &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance; + VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance; +#else + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureMotionInstanceNV + { + using NativeType = VkAccelerationStructureMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , data( data_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMotionInstanceNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
AccelerationStructureMotionInstanceNV & + operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInstanceNV & + operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & + setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & + setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, flags, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic; + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == + sizeof( VkAccelerationStructureMotionInstanceNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" 
);
+
+  struct AccelerationStructureVersionInfoKHR
+  {
+    using NativeType = VkAccelerationStructureVersionInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eAccelerationStructureVersionInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pVersionData( pVersionData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureVersionInfoKHR &
+      operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureVersionInfoKHR &
+      operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR &
+      setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVersionData = pVersionData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
+    }
+
+    explicit operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVersionData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
+#else
+    bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
+# endif
+    }
+
+    bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eAccelerationStructureVersionInfoKHR;
+    const void *                        pNext        = {};
+    const uint8_t *                     pVersionData = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) ==
+                              sizeof( VkAccelerationStructureVersionInfoKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
+    "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = AccelerationStructureVersionInfoKHR; + }; + + struct AcquireNextImageInfoKHR + { + using NativeType = VkAcquireNextImageInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint64_t timeout_ = {}, + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + , timeout( timeout_ ) + , semaphore( semaphore_ ) + , fence( fence_ ) + , deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireNextImageInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & + setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; +#else + bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( timeout == rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && + ( deviceMask == rhs.deviceMask ); +# endif + } + + bool 
operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint64_t timeout = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + uint32_t deviceMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == + sizeof( VkAcquireNextImageInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AcquireNextImageInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AcquireNextImageInfoKHR; + }; + + struct AcquireProfilingLockInfoKHR + { + using NativeType = VkAcquireProfilingLockInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, + uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , timeout( timeout_ ) + {} + + VULKAN_HPP_CONSTEXPR + AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, timeout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; +#else + bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) 
&& ( flags == rhs.flags ) && ( timeout == rhs.timeout ); +# endif + } + + bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; + uint64_t timeout = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == + sizeof( VkAcquireProfilingLockInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AcquireProfilingLockInfoKHR; + }; + + struct AllocationCallbacks + { + using NativeType = VkAllocationCallbacks; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + PFN_vkAllocationFunction pfnAllocation_ = {}, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, + PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ) + , pfnAllocation( pfnAllocation_ ) + , pfnReallocation( pfnReallocation_ ) + , pfnFree( pfnFree_ ) + , pfnInternalAllocation( pfnInternalAllocation_ ) + , pfnInternalFree( pfnInternalFree_ ) + {} + + VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + : AllocationCallbacks( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnAllocation = pfnAllocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnReallocation = pfnReallocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnFree = pfnFree_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalAllocation = pfnInternalAllocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalFree = pfnInternalFree_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + 
} + + explicit operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AllocationCallbacks const & ) const = default; +#else + bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && + ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) && + ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree ); +# endif + } + + bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + void * pUserData = {}; + PFN_vkAllocationFunction pfnAllocation = {}; + PFN_vkReallocationFunction pfnReallocation = {}; + PFN_vkFreeFunction pfnFree = {}; + PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; + PFN_vkInternalFreeNotification pfnInternalFree = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AllocationCallbacks is not nothrow_move_constructible!" 
);
+
+  struct ComponentMapping
+  {
+    using NativeType = VkComponentMapping;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                        VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                        VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                        VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity )
+      VULKAN_HPP_NOEXCEPT
+      : r( r_ )
+      , g( g_ )
+      , b( b_ )
+      , a( a_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
+    {
+      r = r_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
+    {
+      g = g_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b = b_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
+    {
+      a = a_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComponentMapping *>( this );
+    }
+
+    explicit operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComponentMapping *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
+               VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
+               VULKAN_HPP_NAMESPACE::ComponentSwizzle const &,
+               VULKAN_HPP_NAMESPACE::ComponentSwizzle const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( r, g, b, a );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ComponentMapping const & ) const = default;
+#else
+    bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a );
+# endif
+    }
+
+    bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value,
+                            "struct wrapper is not a standard layout!"
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComponentMapping is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatProperties2ANDROID + { + using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = + VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , externalFormat( externalFormat_ ) + , formatFeatures( formatFeatures_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( + AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatProperties2ANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatProperties2ANDROID & + operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatProperties2ANDROID & + operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + externalFormat, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 
70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
+             ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
+             ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
+             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
+             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
+             ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+# endif
+    }
+
+    bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint64_t externalFormat = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel =
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) ==
+                              sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
+    "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!"
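+  // Usage sketch (illustrative only): this structure is filled in by the implementation when it is
+  // chained into the pNext of VkAndroidHardwareBufferPropertiesANDROID while querying an
+  // AHardwareBuffer; "device" (a VkDevice) and "hardwareBuffer" (an AHardwareBuffer *) are assumed.
+  //   VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID formatProps;
+  //   VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID        props;
+  //   props.pNext = &formatProps;
+  //   vkGetAndroidHardwareBufferPropertiesANDROID(
+  //     device, hardwareBuffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &props ) );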
); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatProperties2ANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatPropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = + VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , externalFormat( externalFormat_ ) + , formatFeatures( formatFeatures_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatPropertiesANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatPropertiesANDROID & + operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID & + operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + externalFormat, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
+             ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
+             ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
+             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
+             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
+             ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+# endif
+    }
+
+    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint64_t externalFormat = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel =
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) ==
+                              sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
+    "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferPropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferPropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ) + , memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferPropertiesANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferPropertiesANDROID & + operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID & + operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allocationSize, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && + ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferPropertiesANDROID ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferUsageANDROID + { + using NativeType = VkAndroidHardwareBufferUsageANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAndroidHardwareBufferUsageANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : androidHardwareBufferUsage( androidHardwareBufferUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferUsageANDROID & + operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, androidHardwareBufferUsage ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); +# endif + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + void * pNext = {}; + uint64_t androidHardwareBufferUsage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == + sizeof( VkAndroidHardwareBufferUsageANDROID ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AndroidHardwareBufferUsageANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidSurfaceCreateInfoKHR + { + using NativeType = VkAndroidSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, + struct ANativeWindow * window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR + AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & + setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow * window = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == + sizeof( VkAndroidSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AndroidSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ApplicationInfo + { + using NativeType = VkApplicationInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, + uint32_t applicationVersion_ = {}, + const char * pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : pApplicationName( pApplicationName_ ) + , applicationVersion( applicationVersion_ ) + , pEngineName( pEngineName_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ApplicationInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationName = pApplicationName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + applicationVersion = applicationVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT + { + pEngineName = pEngineName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + engineVersion = engineVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT + { + apiVersion = apiVersion_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pApplicationName != rhs.pApplicationName ) + if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 )
+        return cmp;
+      if ( pEngineName != rhs.pEngineName )
+        if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 )
+        return cmp;
+      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( ( pApplicationName == rhs.pApplicationName ) ||
+               ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) &&
+             ( applicationVersion == rhs.applicationVersion ) &&
+             ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) &&
+             ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+    const void * pNext = {};
+    const char * pApplicationName = {};
+    uint32_t applicationVersion = {};
+    const char * pEngineName = {};
+    uint32_t engineVersion = {};
+    uint32_t apiVersion = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value,
+                            "ApplicationInfo is not nothrow_move_constructible!"
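+  // Usage sketch (illustrative only): a typical ApplicationInfo built with the setters above;
+  // the application/engine names and the version values are placeholders.
+  //   VULKAN_HPP_NAMESPACE::ApplicationInfo appInfo = VULKAN_HPP_NAMESPACE::ApplicationInfo()
+  //                                                     .setPApplicationName( "MyApp" )
+  //                                                     .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+  //                                                     .setPEngineName( "MyEngine" )
+  //                                                     .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+  //                                                     .setApiVersion( VK_API_VERSION_1_1 );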
); + + template <> + struct CppType + { + using Type = ApplicationInfo; + }; + + struct AttachmentDescription + { + using NativeType = VkAttachmentDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & + setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription const & ) const = default; +#else + bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && + ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && + ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && + ( finalLayout == rhs.finalLayout ); +# endif + } + + bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentDescription is not nothrow_move_constructible!" 
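+  // Usage sketch (illustrative only): a single-sampled color attachment that is cleared on load and
+  // transitioned for presentation; the format is a placeholder for whatever the swapchain reports.
+  //   VULKAN_HPP_NAMESPACE::AttachmentDescription colorAttachment(
+  //     {},                                                  // flags
+  //     VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm,        // format
+  //     VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,       // samples
+  //     VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eClear,      // loadOp
+  //     VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,     // storeOp
+  //     VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eDontCare,   // stencilLoadOp
+  //     VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eDontCare,  // stencilStoreOp
+  //     VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,       // initialLayout
+  //     VULKAN_HPP_NAMESPACE::ImageLayout::ePresentSrcKHR ); // finalLayout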
); + + struct AttachmentDescription2 + { + using NativeType = VkAttachmentDescription2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription2( + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = 
stencilStoreOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & + setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + format, + samples, + loadOp, + storeOp, + stencilLoadOp, + stencilStoreOp, + initialLayout, + finalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription2 const & ) const = default; +#else + bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && + ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && + ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && + ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); +# endif + } + + bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == + sizeof( VkAttachmentDescription2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentDescription2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = AttachmentDescription2; + }; + using AttachmentDescription2KHR = AttachmentDescription2; + + struct AttachmentDescriptionStencilLayout + { + using NativeType = VkAttachmentDescriptionStencilLayout; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eAttachmentDescriptionStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : stencilInitialLayout( stencilInitialLayout_ ) + , stencilFinalLayout( stencilFinalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentDescriptionStencilLayout & + operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout & + operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilInitialLayout = stencilInitialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilFinalLayout = stencilFinalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default; +#else + bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && + ( stencilFinalLayout == rhs.stencilFinalLayout ); +# endif + } + + bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eAttachmentDescriptionStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == + sizeof( VkAttachmentDescriptionStencilLayout ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AttachmentDescriptionStencilLayout; + }; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + + struct AttachmentReference + { + using NativeType = VkAttachmentReference; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference( + uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : attachment( attachment_ ) + , layout( layout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReference( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT + { + attachment = attachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference & + setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( attachment, layout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference const & ) const = default; +#else + bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( attachment == rhs.attachment ) && ( layout == rhs.layout ); +# endif + } + + bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentReference is not nothrow_move_constructible!" ); + + struct AttachmentReference2 + { + using NativeType = VkAttachmentReference2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentReference2( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + : attachment( attachment_ ) + , layout( layout_ ) + , aspectMask( aspectMask_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReference2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT + { + attachment = attachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & + setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachment, layout, aspectMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference2 const & ) const = default; +#else + bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && + ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask ); +# endif + } + + bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; + const void * pNext = {}; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + 
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentReference2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AttachmentReference2; + }; + using AttachmentReference2KHR = AttachmentReference2; + + struct AttachmentReferenceStencilLayout + { + using NativeType = VkAttachmentReferenceStencilLayout; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = + VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : stencilLayout( stencilLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR + AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentReferenceStencilLayout & + operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & + setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilLayout = stencilLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default; +#else + bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout ); +# endif + } + + bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == + sizeof( VkAttachmentReferenceStencilLayout ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = AttachmentReferenceStencilLayout; + }; + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; + + struct AttachmentSampleCountInfoAMD + { + using NativeType = VkAttachmentSampleCountInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT + : colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentSamples( pColorAttachmentSamples_ ) + , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR + AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentSampleCountInfoAMD( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AttachmentSampleCountInfoAMD( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) + : colorAttachmentCount( static_cast( colorAttachmentSamples_.size() ) ) + , pColorAttachmentSamples( colorAttachmentSamples_.data() ) + , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & + setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples( + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentSamples = pColorAttachmentSamples_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AttachmentSampleCountInfoAMD & setColorAttachmentSamples( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const & + colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentSamples_.size() ); + pColorAttachmentSamples = colorAttachmentSamples_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + depthStencilAttachmentSamples = depthStencilAttachmentSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default; +#else + bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && + ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples ); +# endif + } + + bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD; + const void * pNext = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == + sizeof( VkAttachmentSampleCountInfoAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentSampleCountInfoAMD>
+  {
+    using Type = AttachmentSampleCountInfoAMD;
+  };
+  using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;
+
+  struct Extent2D
+  {
+    using NativeType = VkExtent2D;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<Extent2D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent2D *>( this );
+    }
+
+    explicit operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent2D *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent2D const & ) const = default;
+#else
+    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( width == rhs.width ) && ( height == rhs.height );
+#  endif
+    }
+
+    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width  = {};
+    uint32_t height = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value,
+                            "Extent2D is not nothrow_move_constructible!" );
+
+  struct SampleLocationEXT
+  {
+    using NativeType = VkSampleLocationEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<SampleLocationEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationEXT *>( this );
+    }
+
+    explicit operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<float const &, float const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SampleLocationEXT const & ) const = default;
+#else
+    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( x == rhs.x ) && ( y == rhs.y );
+#  endif
+    }
+
+    bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x = {};
+    float y = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value,
+                            "SampleLocationEXT is not nothrow_move_constructible!"
); + + struct SampleLocationsInfoEXT + { + using NativeType = VkSampleLocationsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( sampleLocationsCount_ ) + , pSampleLocations( pSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SampleLocationsInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
explicit operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SampleLocationsInfoEXT const & ) const = default; +#else + bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) && + ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && + ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations ); +# endif + } + + bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; + uint32_t sampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == + sizeof( VkSampleLocationsInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SampleLocationsInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SampleLocationsInfoEXT; + }; + + struct AttachmentSampleLocationsEXT + { + using NativeType = VkAttachmentSampleLocationsEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( + uint32_t attachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentIndex( attachmentIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & + setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + attachmentIndex = attachmentIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( attachmentIndex, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default; +#else + bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachmentIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == + sizeof( VkAttachmentSampleLocationsEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" 
); + + struct BaseInStructure + { + using NativeType = VkBaseInStructure; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = + VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ ) + {} + + BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + : BaseInStructure( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BaseInStructure & + setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseInStructure const & ) const = default; +#else + bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BaseInStructure is not nothrow_move_constructible!" 
); + + struct BaseOutStructure + { + using NativeType = VkBaseOutStructure; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = + VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ ) + {} + + BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + : BaseOutStructure( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & + setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseOutStructure const & ) const = default; +#else + bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BaseOutStructure is not nothrow_move_constructible!" 
); + + struct BindAccelerationStructureMemoryInfoNV + { + using NativeType = VkBindAccelerationStructureMemoryInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBindAccelerationStructureMemoryInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindAccelerationStructureMemoryInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindAccelerationStructureMemoryInfoNV & + operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindAccelerationStructureMemoryInfoNV & + operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
BindAccelerationStructureMemoryInfoNV & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default; +#else + bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); +# endif + } + + bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == + sizeof( VkBindAccelerationStructureMemoryInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BindAccelerationStructureMemoryInfoNV; + }; + + struct BindBufferMemoryDeviceGroupInfo + { + using NativeType = VkBindBufferMemoryDeviceGroupInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindBufferMemoryDeviceGroupInfo & + operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & + setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & + setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ); +# endif + } + + bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == + sizeof( VkBindBufferMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BindBufferMemoryDeviceGroupInfo; + }; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + + struct BindBufferMemoryInfo + { + using NativeType = VkBindBufferMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer, memory, memoryOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryInfo 
const & ) const = default; +#else + bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); +# endif + } + + bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindBufferMemoryInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BindBufferMemoryInfo; + }; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + + struct Offset2D + { + using NativeType = VkOffset2D; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + {} + + VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkOffset2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset2D const & ) const = default; +#else + bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ); +# endif + } + + bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t x = {}; + int32_t y = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Offset2D is not nothrow_move_constructible!" ); + + struct Rect2D + { + using NativeType = VkRect2D; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRect2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Rect2D const & ) const = default; +#else + bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Rect2D is not nothrow_move_constructible!" 
); + + struct BindImageMemoryDeviceGroupInfo + { + using NativeType = VkBindImageMemoryDeviceGroupInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( + uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + uint32_t splitInstanceBindRegionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) + , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + splitInstanceBindRegions_ = {} ) + : deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + , splitInstanceBindRegionCount( static_cast( splitInstanceBindRegions_.size() ) ) + , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImageMemoryDeviceGroupInfo & + operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); + pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && + ( pDeviceIndices == rhs.pDeviceIndices ) && + ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && + ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); +# endif + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == + sizeof( VkBindImageMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BindImageMemoryDeviceGroupInfo; + }; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + struct BindImageMemoryInfo + { + using NativeType = VkBindImageMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, memory, memoryOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryInfo const & ) const = default; +#else + bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ); +# endif + } + + bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), + "struct and wrapper 
have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImageMemoryInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BindImageMemoryInfo; + }; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + + struct BindImageMemorySwapchainInfoKHR + { + using NativeType = VkBindImageMemorySwapchainInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + , imageIndex( imageIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImageMemorySwapchainInfoKHR & + operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & + setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT + { + imageIndex = imageIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, imageIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; +#else + bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && + ( imageIndex == rhs.imageIndex ); +# endif + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; + }; + 
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == + sizeof( VkBindImageMemorySwapchainInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BindImageMemorySwapchainInfoKHR; + }; + + struct BindImagePlaneMemoryInfo + { + using NativeType = VkBindImagePlaneMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; +#else + bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == + sizeof( VkBindImagePlaneMemoryInfo ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BindImagePlaneMemoryInfo; + }; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + + struct BindIndexBufferIndirectCommandNV + { + using NativeType = VkBindIndexBufferIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , indexType( indexType_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindIndexBufferIndirectCommandNV & + operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, indexType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); +# endif + } + + bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == + sizeof( VkBindIndexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" ); + + struct BindShaderGroupIndirectCommandNV + { + using NativeType = VkBindShaderGroupIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : groupIndex( groupIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindShaderGroupIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindShaderGroupIndirectCommandNV & + operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT + { + groupIndex = groupIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( groupIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default; +#else + bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( groupIndex == rhs.groupIndex ); +# endif + } + + bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t groupIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == + sizeof( VkBindShaderGroupIndirectCommandNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" 
); + + struct SparseMemoryBind + { + using NativeType = VkSparseMemoryBind; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceOffset( resourceOffset_ ) + , size( size_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseMemoryBind( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & + setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT + { + resourceOffset = resourceOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & + setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( resourceOffset, size, memory, memoryOffset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseMemoryBind const & ) const = default; +#else + bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) 
== sizeof( VkSparseMemoryBind ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseMemoryBind is not nothrow_move_constructible!" ); + + struct SparseBufferMemoryBindInfo + { + using NativeType = VkSparseBufferMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseBufferMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Buffer buffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : buffer( buffer_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + 
bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == + sizeof( VkSparseBufferMemoryBindInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" ); + + struct SparseImageOpaqueMemoryBindInfo + { + using NativeType = VkSparseImageOpaqueMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageOpaqueMemoryBindInfo & + operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & + setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( image, 
bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == + sizeof( VkSparseImageOpaqueMemoryBindInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" ); + + struct ImageSubresource + { + using NativeType = VkImageSubresource; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , arrayLayer( arrayLayer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresource( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayer = arrayLayer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, mipLevel, arrayLayer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource const & ) const = default; +#else + bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel 
) && ( arrayLayer == rhs.arrayLayer );
+# endif
+    }
+
+    bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t mipLevel = {};
+    uint32_t arrayLayer = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value,
+                            "ImageSubresource is not nothrow_move_constructible!" );
+
+  struct Offset3D
+  {
+    using NativeType = VkOffset3D;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}
+
+    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset3D *>( this );
+    }
+
+    explicit operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset3D *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Offset3D const & ) const = default;
+#else
+    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
+# endif
+    }
+
+    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    int32_t x = {};
+    int32_t y = {};
+    int32_t z = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value,
+                            "Offset3D is not nothrow_move_constructible!" );
+
+  struct Extent3D
+  {
+    using NativeType = VkExtent3D;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
+
+    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent3D *>( this );
+    }
+
+    explicit operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent3D *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height, depth );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent3D const & ) const = default;
+#else
+    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
+# endif
+    }
+
+    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value,
+                            "Extent3D is not nothrow_move_constructible!"
); + + struct SparseImageMemoryBind + { + using NativeType = VkSparseImageMemoryBind; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : subresource( subresource_ ) + , offset( offset_ ) + , extent( extent_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBind( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT + { + subresource = subresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & + setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subresource, offset, extent, memory, memoryOffset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBind const & ) const = default; +#else + bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && + ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseImageMemoryBind const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageMemoryBind is not nothrow_move_constructible!" ); + + struct SparseImageMemoryBindInfo + { + using NativeType = VkSparseImageMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBindInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo( + VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & + setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & + setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo & setBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( image, bindCount, 
pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == + sizeof( VkSparseImageMemoryBindInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SparseImageMemoryBindInfo is not nothrow_move_constructible!" ); + + struct BindSparseInfo + { + using NativeType = VkBindSparseInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , bufferBindCount( bufferBindCount_ ) + , pBufferBinds( pBufferBinds_ ) + , imageOpaqueBindCount( imageOpaqueBindCount_ ) + , pImageOpaqueBinds( pImageOpaqueBinds_ ) + , imageBindCount( imageBindCount_ ) + , pImageBinds( pImageBinds_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindSparseInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageOpaqueBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , bufferBindCount( static_cast( bufferBinds_.size() ) ) + , pBufferBinds( bufferBinds_.data() ) + , imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ) + , pImageOpaqueBinds( imageOpaqueBinds_.data() ) + , imageBindCount( static_cast( imageBinds_.size() ) ) + , 
pImageBinds( imageBinds_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = bufferBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBufferBinds = pBufferBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setBufferBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = static_cast( bufferBinds_.size() ); + pBufferBinds = bufferBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds( + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageOpaqueBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = static_cast( imageOpaqueBinds_.size() ); + pImageOpaqueBinds = imageOpaqueBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = imageBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageBinds = pImageBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = 
static_cast( imageBinds_.size() ); + pImageBinds = imageBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setSignalSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphores, + bufferBindCount, + pBufferBinds, + imageOpaqueBindCount, + pImageOpaqueBinds, + imageBindCount, + pImageBinds, + signalSemaphoreCount, + pSignalSemaphores ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindSparseInfo const & ) const = default; +#else + bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && + ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && + ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) && + ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); +# endif + } + + bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t bufferBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {}; + uint32_t imageOpaqueBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; + uint32_t imageBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindSparseInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BindSparseInfo; + }; + + struct BindVertexBufferIndirectCommandNV + { + using NativeType = VkBindVertexBufferIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR + BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindVertexBufferIndirectCommandNV & + operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & + setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == + sizeof( VkBindVertexBufferIndirectCommandNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" ); + + struct ImageSubresourceLayers + { + using NativeType = VkImageSubresourceLayers; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceLayers( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceLayers const & ) const = default; +#else + bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == + sizeof( VkImageSubresourceLayers ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresourceLayers is not nothrow_move_constructible!" ); + + struct ImageBlit2 + { + using NativeType = VkImageBlit2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageBlit2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & + setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & + setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit2 const & ) const = default; +#else + bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffsets == rhs.dstOffsets ); +# endif + } + + bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageBlit2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageBlit2; + }; + using ImageBlit2KHR = ImageBlit2; + + struct BlitImageInfo2 + { + using NativeType = VkBlitImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + , filter( filter_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BlitImageInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + , filter( filter_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & + setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = 
srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & + setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & + setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2 & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT + { + filter = filter_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageInfo2 const & ) const = default; +#else + bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ) && ( filter == rhs.filter ); +# endif + } + + bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BlitImageInfo2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BlitImageInfo2; + }; + using BlitImageInfo2KHR = BlitImageInfo2; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionBufferCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {} ) VULKAN_HPP_NOEXCEPT + : collection( collection_ ) + , index( index_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionBufferCreateInfoFUCHSIA( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionBufferCreateInfoFUCHSIA & + operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionBufferCreateInfoFUCHSIA & + operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && + ( index == rhs.index ); +# endif + } + + bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == + sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferCollectionBufferCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionConstraintsInfoFUCHSIA + { + using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferCollectionConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {}, + uint32_t maxBufferCount_ = {}, + uint32_t minBufferCountForCamping_ = {}, + uint32_t minBufferCountForDedicatedSlack_ = {}, + uint32_t minBufferCountForSharedSlack_ = {} ) VULKAN_HPP_NOEXCEPT + : minBufferCount( minBufferCount_ ) + , maxBufferCount( maxBufferCount_ ) + , minBufferCountForCamping( minBufferCountForCamping_ ) + , minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ ) + , minBufferCountForSharedSlack( minBufferCountForSharedSlack_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionConstraintsInfoFUCHSIA( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionConstraintsInfoFUCHSIA & + operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionConstraintsInfoFUCHSIA & + operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCount = minBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + maxBufferCount = maxBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForCamping = minBufferCountForCamping_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT + { + 
minBufferCountForSharedSlack = minBufferCountForSharedSlack_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minBufferCount, + maxBufferCount, + minBufferCountForCamping, + minBufferCountForDedicatedSlack, + minBufferCountForSharedSlack ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && + ( maxBufferCount == rhs.maxBufferCount ) && ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && + ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) && + ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack ); +# endif + } + + bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t minBufferCount = {}; + uint32_t maxBufferCount = {}; + uint32_t minBufferCountForCamping = {}; + uint32_t minBufferCountForDedicatedSlack = {}; + uint32_t minBufferCountForSharedSlack = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == + sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BufferCollectionConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferCollectionCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {} ) VULKAN_HPP_NOEXCEPT + : collectionToken( collectionToken_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionCreateInfoFUCHSIA & + operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & + setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT + { + collectionToken = collectionToken_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collectionToken ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA; + const void * pNext = {}; + zx_handle_t collectionToken = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == + sizeof( VkBufferCollectionCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferCollectionCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionImageCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferCollectionImageCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {} ) VULKAN_HPP_NOEXCEPT + : collection( collection_ ) + , index( index_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionImageCreateInfoFUCHSIA( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionImageCreateInfoFUCHSIA & + operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionImageCreateInfoFUCHSIA & + operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION 
) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && + ( index == rhs.index ); +# endif + } + + bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == + sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferCollectionImageCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct SysmemColorSpaceFUCHSIA + { + using NativeType = VkSysmemColorSpaceFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {} ) VULKAN_HPP_NOEXCEPT + : colorSpace( colorSpace_ ) + {} + + VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SysmemColorSpaceFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT + { + colorSpace = colorSpace_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, 
pNext, colorSpace ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default; +# else + bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorSpace == rhs.colorSpace ); +# endif + } + + bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA; + const void * pNext = {}; + uint32_t colorSpace = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == + sizeof( VkSysmemColorSpaceFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SysmemColorSpaceFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionPropertiesFUCHSIA + { + using NativeType = VkBufferCollectionPropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferCollectionPropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( + uint32_t memoryTypeBits_ = {}, + uint32_t bufferCount_ = {}, + uint32_t createInfoIndex_ = {}, + uint64_t sysmemPixelFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = + VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + , bufferCount( bufferCount_ ) + , createInfoIndex( createInfoIndex_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , formatFeatures( formatFeatures_ ) + , sysmemColorSpaceIndex( sysmemColorSpaceIndex_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionPropertiesFUCHSIA & + operator=( 
BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT + { + memoryTypeBits = memoryTypeBits_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferCount = bufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT + { + createInfoIndex = createInfoIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT + { + sysmemPixelFormat = sysmemPixelFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + formatFeatures = formatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemColorSpaceIndex( + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT + { + sysmemColorSpaceIndex = sysmemColorSpaceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSamplerYcbcrConversionComponents( + VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrModel( + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT + { + suggestedYcbcrModel = suggestedYcbcrModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT + { + suggestedYcbcrRange = suggestedYcbcrRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + suggestedXChromaOffset = suggestedXChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + suggestedYChromaOffset = suggestedYChromaOffset_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + memoryTypeBits, + 
bufferCount, + createInfoIndex, + sysmemPixelFormat, + formatFeatures, + sysmemColorSpaceIndex, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && + ( bufferCount == rhs.bufferCount ) && ( createInfoIndex == rhs.createInfoIndex ) && + ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) && + ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && + ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && + ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + uint32_t bufferCount = {}; + uint32_t createInfoIndex = {}; + uint64_t sysmemPixelFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == + sizeof( VkBufferCollectionPropertiesFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BufferCollectionPropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct BufferCreateInfo + { + using NativeType = VkBufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::DeviceSize size_, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & + setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & + setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo & setQueueFamilyIndices( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCreateInfo const & ) const = default; +#else + bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && + ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BufferCreateInfo; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferConstraintsInfoFUCHSIA + { + using NativeType = VkBufferConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {} ) + VULKAN_HPP_NOEXCEPT + : createInfo( createInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & + setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT + { + createInfo = createInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + requiredFormatFeatures = requiredFormatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints( + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) + VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && + ( requiredFormatFeatures == rhs.requiredFormatFeatures ) 
&& + ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ); +# endif + } + + bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == + sizeof( VkBufferConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct BufferCopy + { + using NativeType = VkBufferCopy; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcOffset, dstOffset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy const & ) const = default; +#else + bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + 
} +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCopy is not nothrow_move_constructible!" ); + + struct BufferCopy2 + { + using NativeType = VkBufferCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCopy2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & + setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & + setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcOffset, dstOffset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy2 const & ) const = default; +#else + bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && + ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize 
dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCopy2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferCopy2; + }; + using BufferCopy2KHR = BufferCopy2; + + struct BufferDeviceAddressCreateInfoEXT + { + using NativeType = VkBufferDeviceAddressCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferDeviceAddressCreateInfoEXT & + operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; +#else + bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == + sizeof( VkBufferDeviceAddressCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferDeviceAddressCreateInfoEXT; + }; + + struct BufferDeviceAddressInfo + { + using NativeType = VkBufferDeviceAddressInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressInfo const & ) const = default; +#else + bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == + sizeof( VkBufferDeviceAddressInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferDeviceAddressInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BufferDeviceAddressInfo; + }; + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + + struct BufferImageCopy + { + using NativeType = VkBufferImageCopy; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferImageCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & + setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & + setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy const & ) const = default; +#else + bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferOffset == 
rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && + ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && + ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferImageCopy is not nothrow_move_constructible!" ); + + struct BufferImageCopy2 + { + using NativeType = VkBufferImageCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy2( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferImageCopy2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & + setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & + setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ 
) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy2 const & ) const = default; +#else + bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && + ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferImageCopy2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = BufferImageCopy2; + }; + using BufferImageCopy2KHR = BufferImageCopy2; + + struct BufferMemoryBarrier + { + using NativeType = VkBufferMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, 
srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier const & ) const = default; +#else + bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferMemoryBarrier is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferMemoryBarrier; + }; + + struct BufferMemoryBarrier2 + { + using NativeType = VkBufferMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
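// Usage sketch (illustrative only, not part of the generated header; assumes the
// default VULKAN_HPP_NAMESPACE of `vk` and an existing vk::Buffer `buf`): each
// setter below returns *this, so a synchronization2 barrier can be built fluently
// and bridged back to the C struct through the explicit conversion operator.
//
//   vk::BufferMemoryBarrier2 barrier =
//       vk::BufferMemoryBarrier2{}
//           .setSrcStageMask( vk::PipelineStageFlagBits2::eCopy )
//           .setSrcAccessMask( vk::AccessFlagBits2::eTransferWrite )
//           .setDstStageMask( vk::PipelineStageFlagBits2::eVertexShader )
//           .setDstAccessMask( vk::AccessFlagBits2::eShaderRead )
//           .setBuffer( buf )
//           .setSize( VK_WHOLE_SIZE );
//   VkBufferMemoryBarrier2 native = static_cast<VkBufferMemoryBarrier2>( barrier );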
+ VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + srcStageMask, + srcAccessMask, + dstStageMask, + dstAccessMask, + srcQueueFamilyIndex, + dstQueueFamilyIndex, + buffer, + offset, + size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier2 const & ) const = default; +#else + bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + 
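// Together with srcStageMask above, the stage/access members below describe the
// execution and memory dependency for the barrier; srcQueueFamilyIndex and
// dstQueueFamilyIndex matter only for a queue family ownership transfer (otherwise
// both are left as VK_QUEUE_FAMILY_IGNORED), and buffer/offset/size select the
// affected range, with VK_WHOLE_SIZE covering the remainder of the buffer.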
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferMemoryBarrier2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferMemoryBarrier2; + }; + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; + + struct BufferMemoryRequirementsInfo2 + { + using NativeType = VkBufferMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR + BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferMemoryRequirementsInfo2 & + operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
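// Usage sketch (illustrative only; assumes the default `vk` namespace, enhanced-mode
// bindings, and existing vk::Device `device` / vk::Buffer `buffer` handles): this
// struct is the input to the vkGetBufferMemoryRequirements2-style query, e.g.
//
//   vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements2(
//       vk::BufferMemoryRequirementsInfo2{}.setBuffer( buffer ) );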
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == + sizeof( VkBufferMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferMemoryRequirementsInfo2; + }; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + + struct BufferOpaqueCaptureAddressCreateInfo + { + using NativeType = VkBufferOpaqueCaptureAddressCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eBufferOpaqueCaptureAddressCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : opaqueCaptureAddress( opaqueCaptureAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferOpaqueCaptureAddressCreateInfo & + operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferOpaqueCaptureAddressCreateInfo & + operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & + setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default; +#else + bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); +# endif + } + + bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == + sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferOpaqueCaptureAddressCreateInfo; + }; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + + struct BufferViewCreateInfo + { + using NativeType = VkBufferViewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , buffer( buffer_ ) + , format( format_ ) + , offset( offset_ ) + , range( range_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferViewCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & + setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, buffer, format, offset, range ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferViewCreateInfo const & ) const = default; +#else + bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && + ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range ); +# endif + } + + bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferViewCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = BufferViewCreateInfo; + }; + + struct CalibratedTimestampInfoEXT + { + using NativeType = VkCalibratedTimestampInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = + VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT + : timeDomain( timeDomain_ ) + {} + + VULKAN_HPP_CONSTEXPR + CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CalibratedTimestampInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & + setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT + { + timeDomain = timeDomain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timeDomain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default; +#else + bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); +# endif + } + + bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) == + sizeof( VkCalibratedTimestampInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CalibratedTimestampInfoEXT; + }; + + struct CheckpointData2NV + { + using NativeType = VkCheckpointData2NV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT + : stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + {} + + VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : CheckpointData2NV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, pCheckpointMarker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointData2NV const & ) const = default; +#else + bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); +# endif + } + + bool operator!=( CheckpointData2NV const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {}; + void * pCheckpointMarker = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointData2NV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CheckpointData2NV; + }; + + struct CheckpointDataNV + { + using NativeType = VkCheckpointDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT + : stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + {} + + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CheckpointDataNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, pCheckpointMarker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointDataNV const & ) const = default; +#else + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && + ( pCheckpointMarker == rhs.pCheckpointMarker ); +# endif + } + + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; + void * pCheckpointMarker = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointDataNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = CheckpointDataNV; + }; + + union ClearColorValue + { + using NativeType = VkClearColorValue; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkClearColorValue const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClearColorValue &() + { + return *reinterpret_cast( this ); + } + + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; + }; + + struct ClearDepthStencilValue + { + using NativeType = VkClearDepthStencilValue; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT + : depth( depth_ ) + , stencil( stencil_ ) + {} + + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT + { + stencil = stencil_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( depth, stencil ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearDepthStencilValue const & ) const = default; +#else + bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); +# endif + } + + bool operator!=( ClearDepthStencilValue const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float depth = {}; + uint32_t stencil = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == + sizeof( VkClearDepthStencilValue ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClearDepthStencilValue is not nothrow_move_constructible!" ); + + union ClearValue + { + using NativeType = VkClearValue; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) + : depthStencil( depthStencil_ ) + {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearValue & + setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearValue & + setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT + { + depthStencil = depthStencil_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkClearValue const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClearValue &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::ClearColorValue color; + VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil; +#else + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct ClearAttachment + { + using NativeType = VkClearAttachment; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t colorAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , colorAttachment( colorAttachment_ ) + , clearValue( clearValue_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearAttachment( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachment = colorAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & + setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + { + clearValue = clearValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + explicit operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, colorAttachment, clearValue ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t colorAttachment = {}; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClearAttachment is not nothrow_move_constructible!" ); + + struct ClearRect + { + using NativeType = VkClearRect; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : rect( rect_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT + { + rect = rect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkClearRect &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( rect, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearRect const & ) const = default; +#else + bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClearRect is not nothrow_move_constructible!" ); + + struct CoarseSampleLocationNV + { + using NativeType = VkCoarseSampleLocationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT + : pixelX( pixelX_ ) + , pixelY( pixelY_ ) + , sample( sample_ ) + {} + + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT + { + pixelX = pixelX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT + { + pixelY = pixelY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT + { + sample = sample_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pixelX, pixelY, sample ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleLocationNV const & ) const = default; +#else + bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); +# endif + } + + bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t pixelX = {}; + uint32_t pixelY = {}; + uint32_t sample = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == + sizeof( VkCoarseSampleLocationNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CoarseSampleLocationNV is not nothrow_move_constructible!" 
); + + struct CoarseSampleOrderCustomNV + { + using NativeType = VkCoarseSampleOrderCustomNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( sampleLocationCount_ ) + , pSampleLocations( pSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR + CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV( + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, + uint32_t sampleCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleCount = sampleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = sampleLocationCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; +#else + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && + ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations ); +# endif + } + + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == + sizeof( VkCoarseSampleOrderCustomNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" ); + + struct CommandBufferAllocateInfo + { + using NativeType = VkCommandBufferAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( + VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT + : commandPool( commandPool_ ) + , level( level_ ) + , commandBufferCount( commandBufferCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & + setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT + { + commandPool = commandPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & + setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT + { + level = level_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & + setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + 
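// The explicit conversion operators here expose the wrapper as the underlying C
// struct for code that still calls vkAllocateCommandBuffers directly; with the C++
// bindings the same info is consumed as-is (illustrative sketch, assuming the default
// `vk` namespace and existing vk::Device `device` / vk::CommandPool `pool` handles):
//
//   std::vector<vk::CommandBuffer> cmdBufs = device.allocateCommandBuffers(
//       vk::CommandBufferAllocateInfo{ pool, vk::CommandBufferLevel::ePrimary, 1 } );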
return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandPool, level, commandBufferCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; +#else + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && + ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount ); +# endif + } + + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == + sizeof( VkCommandBufferAllocateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferAllocateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = CommandBufferAllocateInfo; + }; + + struct CommandBufferInheritanceInfo + { + using NativeType = VkCommandBufferInheritanceInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + : renderPass( renderPass_ ) + , subpass( subpass_ ) + , framebuffer( framebuffer_ ) + , occlusionQueryEnable( occlusionQueryEnable_ ) + , queryFlags( queryFlags_ ) + , pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + { + queryFlags = queryFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
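// reflect() ties every member so that operator== below can compare member-wise
// instead of byte-wise (which would also compare padding); the surrounding guard
// disables it for GCC 7.5 and older, where the hand-written comparison branch is
// used instead.
//
// Usage sketch (illustrative only; assumes the default `vk` namespace, a secondary
// vk::CommandBuffer `secondary`, and existing `renderPass` / `framebuffer` handles):
//
//   vk::CommandBufferInheritanceInfo inheritance{ renderPass, 0, framebuffer };
//   secondary.begin( vk::CommandBufferBeginInfo{
//       vk::CommandBufferUsageFlagBits::eRenderPassContinue, &inheritance } );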
return std::tie(
+        sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
+             ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) &&
+             ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
+             ( pipelineStatistics == rhs.pipelineStatistics );
+# endif
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType               sType                = StructureType::eCommandBufferInheritanceInfo;
+    const void *                                      pNext                = {};
+    VULKAN_HPP_NAMESPACE::RenderPass                  renderPass           = {};
+    uint32_t                                          subpass              = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer          = {};
+    VULKAN_HPP_NAMESPACE::Bool32                      occlusionQueryEnable = {};
+    VULKAN_HPP_NAMESPACE::QueryControlFlags           queryFlags           = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics   = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) ==
+                              sizeof( VkCommandBufferInheritanceInfo ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value,
+    "CommandBufferInheritanceInfo is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceInfo; + }; + + struct CommandBufferBeginInfo + { + using NativeType = VkCommandBufferBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pInheritanceInfo( pInheritanceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & + setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo( + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pInheritanceInfo = pInheritanceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pInheritanceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferBeginInfo const & ) const = default; +#else + bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pInheritanceInfo == rhs.pInheritanceInfo ); +# endif + } + + bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == + sizeof( VkCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
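
// Usage sketch (illustrative only): beginning one-time-submit recording on a
// previously allocated vk::CommandBuffer `cmd`.
vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
cmd.begin( beginInfo );
// ... record commands ...
cmd.end();
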
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferBeginInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CommandBufferBeginInfo; + }; + + struct CommandBufferInheritanceConditionalRenderingInfoEXT + { + using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : conditionalRenderingEnable( conditionalRenderingEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( + CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT( + VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceConditionalRenderingInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & + setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRenderingEnable = conditionalRenderingEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRenderingEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; +#else + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); +# endif + } + + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) == + sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, + "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; + }; + + struct CommandBufferInheritanceRenderPassTransformInfoQCOM + { + using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + , renderArea( renderArea_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM( + VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderPassTransformInfoQCOM( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform, renderArea ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && + ( renderArea == rhs.renderArea ); +# endif + } + + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) == + sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, + "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; + }; + + struct CommandBufferInheritanceRenderingInfo + { + using NativeType = VkCommandBufferInheritanceRenderingInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceRenderingInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( + VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentFormats( pColorAttachmentFormats_ ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , rasterizationSamples( rasterizationSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderingInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo( + VULKAN_HPP_NAMESPACE::RenderingFlags 
flags_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) + : flags( flags_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , rasterizationSamples( rasterizationSamples_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceRenderingInfo & + operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderingInfo & + operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferInheritanceRenderingInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + viewMask, + colorAttachmentCount, + pColorAttachmentFormats, + depthAttachmentFormat, + stencilAttachmentFormat, + rasterizationSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && + ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) && + ( rasterizationSamples == rhs.rasterizationSamples ); +# endif + } + + bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == + sizeof( VkCommandBufferInheritanceRenderingInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" 
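
// Usage sketch (illustrative only): when a secondary command buffer is recorded
// for dynamic rendering, this structure is chained into the inheritance info to
// describe the attachment formats it will render into. The formats and the
// vk::CommandBuffer `secondaryCmd` below are placeholders.
vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
vk::CommandBufferInheritanceRenderingInfo renderingInheritance{};
renderingInheritance.setColorAttachmentFormats( colorFormat )
                    .setDepthAttachmentFormat( vk::Format::eD32Sfloat );
vk::CommandBufferInheritanceInfo inheritance{};
inheritance.setPNext( &renderingInheritance );
vk::CommandBufferBeginInfo secondaryBeginInfo( vk::CommandBufferUsageFlagBits::eRenderPassContinue, &inheritance );
secondaryCmd.begin( secondaryBeginInfo );
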
); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderingInfo; + }; + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; + + struct Viewport + { + using NativeType = VkViewport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, + float y_ = {}, + float width_ = {}, + float height_ = {}, + float minDepth_ = {}, + float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , width( width_ ) + , height( height_ ) + , minDepth( minDepth_ ) + , maxDepth( maxDepth_ ) + {} + + VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT + { + minDepth = minDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxDepth = maxDepth_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkViewport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkViewport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, width, height, minDepth, maxDepth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Viewport const & ) const = default; +#else + bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth ); +# endif + } + + bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + float width = {}; + float height = {}; + float minDepth = {}; + float maxDepth = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Viewport is not nothrow_move_constructible!" 
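
// Usage sketch (illustrative only): a full-window viewport with the standard
// [0, 1] depth range, bound on a command buffer `cmd`; `width` and `height`
// stand in for the swapchain extent.
vk::Viewport viewport( 0.0f, 0.0f, static_cast<float>( width ), static_cast<float>( height ), 0.0f, 1.0f );
cmd.setViewport( 0 /* firstViewport */, viewport );
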
); + + struct CommandBufferInheritanceViewportScissorInfoNV + { + using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, + uint32_t viewportDepthCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {} ) VULKAN_HPP_NOEXCEPT + : viewportScissor2D( viewportScissor2D_ ) + , viewportDepthCount( viewportDepthCount_ ) + , pViewportDepths( pViewportDepths_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( + CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceViewportScissorInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceViewportScissorInfoNV & + operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceViewportScissorInfoNV & + operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + viewportScissor2D = viewportScissor2D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportDepthCount = viewportDepthCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT + { + pViewportDepths = pViewportDepths_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; +#else + bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && + ( 
viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); +# endif + } + + bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; + uint32_t viewportDepthCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) == + sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CommandBufferInheritanceViewportScissorInfoNV; + }; + + struct CommandBufferSubmitInfo + { + using NativeType = VkCommandBufferSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, + uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : commandBuffer( commandBuffer_ ) + , deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferSubmitInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & + setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + commandBuffer = commandBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandBuffer, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferSubmitInfo const & ) const = default; +#else + bool operator==( CommandBufferSubmitInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && + ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {}; + uint32_t deviceMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == + sizeof( VkCommandBufferSubmitInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandBufferSubmitInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CommandBufferSubmitInfo; + }; + using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; + + struct CommandPoolCreateInfo + { + using NativeType = VkCommandPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & + setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPoolCreateInfo const & ) const = default; +#else + bool operator==( CommandPoolCreateInfo const & rhs ) const 
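
// Usage sketch (illustrative only): submitting a recorded command buffer `cmd`
// on a vk::Queue `queue` through the synchronization2 path; assumes Vulkan 1.3
// (or VK_KHR_synchronization2) is available.
vk::CommandBufferSubmitInfo cmdSubmitInfo( cmd );  // deviceMask 0: no device-group restriction
vk::SubmitInfo2 submitInfo{};
submitInfo.setCommandBufferInfos( cmdSubmitInfo );
queue.submit2( submitInfo );
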
VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ); +# endif + } + + bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CommandPoolCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CommandPoolCreateInfo; + }; + + struct SpecializationMapEntry + { + using NativeType = VkSpecializationMapEntry; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : constantID( constantID_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT + { + constantID = constantID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( constantID, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationMapEntry const & ) const = default; +#else + bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( SpecializationMapEntry const & rhs ) const 
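
// Usage sketch (illustrative only): creating a resettable command pool for a
// queue family index `graphicsQueueFamily` queried elsewhere.
vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, graphicsQueueFamily );
vk::CommandPool commandPool = device.createCommandPool( poolInfo );
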
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t constantID = {}; + uint32_t offset = {}; + size_t size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == + sizeof( VkSpecializationMapEntry ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SpecializationMapEntry is not nothrow_move_constructible!" ); + + struct SpecializationInfo + { + using NativeType = VkSpecializationInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : mapEntryCount( mapEntryCount_ ) + , pMapEntries( pMapEntries_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ) + , pMapEntries( mapEntries_.data() ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = mapEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & + setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT + { + pMapEntries = pMapEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SpecializationInfo & setMapEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo & + setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSpecializationInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( mapEntryCount, pMapEntries, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationInfo const & ) const = default; +#else + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && + ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SpecializationInfo is not nothrow_move_constructible!" ); + + struct PipelineShaderStageCreateInfo + { + using NativeType = VkPipelineShaderStageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stage( stage_ ) + , module( module_ ) + , pName( pName_ ) + , pSpecializationInfo( pSpecializationInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageCreateInfo & + operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + 
setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo( + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && + ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == + sizeof( VkPipelineShaderStageCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" 
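
// Usage sketch (illustrative only): a compute stage whose workgroup size is
// supplied through specialization constant 0; `computeModule` is a
// vk::ShaderModule created elsewhere and "main" is the assumed entry point.
uint32_t workgroupSize = 64;
vk::SpecializationMapEntry        mapEntry( 0 /* constantID */, 0 /* offset */, sizeof( uint32_t ) );
vk::SpecializationInfo            specInfo( 1, &mapEntry, sizeof( workgroupSize ), &workgroupSize );
vk::PipelineShaderStageCreateInfo stageInfo(
  {}, vk::ShaderStageFlagBits::eCompute, computeModule, "main", &specInfo );
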
); + + template <> + struct CppType + { + using Type = PipelineShaderStageCreateInfo; + }; + + struct ComputePipelineCreateInfo + { + using NativeType = VkComputePipelineCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stage( stage_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & + setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; +#else + bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); 
+# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == + sizeof( VkComputePipelineCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComputePipelineCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ComputePipelineCreateInfo; + }; + + struct ConditionalRenderingBeginInfoEXT + { + using NativeType = VkConditionalRenderingBeginInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ConditionalRenderingBeginInfoEXT & + operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
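
// Usage sketch (illustrative only): creating a compute pipeline from a stage
// description like the `stageInfo` sketched earlier; `pipelineLayout` is a
// vk::PipelineLayout created elsewhere. In the default configuration pipeline
// creation returns a ResultValue, since compilation can report a non-error
// result code alongside the handle.
vk::ComputePipelineCreateInfo pipelineInfo( {}, stageInfo, pipelineLayout );
auto         created         = device.createComputePipeline( vk::PipelineCache{}, pipelineInfo );
vk::Pipeline computePipeline = created.value;
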
explicit operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer, offset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; +#else + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( flags == rhs.flags ); +# endif + } + + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == + sizeof( VkConditionalRenderingBeginInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ConditionalRenderingBeginInfoEXT; + }; + + struct ConformanceVersion + { + using NativeType = VkConformanceVersion; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, + uint8_t minor_ = {}, + uint8_t subminor_ = {}, + uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + : major( major_ ) + , minor( minor_ ) + , subminor( subminor_ ) + , patch( patch_ ) + {} + + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + : ConformanceVersion( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT + { + major = major_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT + { + minor = minor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT + { + subminor = subminor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT + { + patch = patch_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkConformanceVersion &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( major, minor, subminor, patch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; +#else + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); +# endif + } + + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint8_t major = {}; + uint8_t minor = {}; + uint8_t subminor = {}; + uint8_t patch = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ConformanceVersion is not nothrow_move_constructible!" ); + using ConformanceVersionKHR = ConformanceVersion; + + struct CooperativeMatrixPropertiesNV + { + using NativeType = VkCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( + uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT + : MSize( MSize_ ) + , NSize( NSize_ ) + , KSize( KSize_ ) + , AType( AType_ ) + , BType( BType_ ) + , CType( CType_ ) + , DType( DType_ ) + , scope( scope_ ) + {} + + VULKAN_HPP_CONSTEXPR + CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CooperativeMatrixPropertiesNV & + operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT + { + MSize = MSize_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT + { + NSize = NSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT + { + KSize = KSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT + { + AType = AType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT + { + BType = BType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT + { + CType = CType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT + { + DType = DType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & + setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT + { + scope = scope_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && + ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( DType == rhs.DType ) && ( scope == rhs.scope ); +# endif + } + + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; + VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == + sizeof( VkCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesNV; + }; + + struct CopyAccelerationStructureInfoKHR + { + using NativeType = VkCopyAccelerationStructureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyAccelerationStructureInfoKHR & + operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; +#else + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && + ( mode == rhs.mode ); +# endif + } + + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: 
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == + sizeof( VkCopyAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureInfoKHR; + }; + + struct CopyAccelerationStructureToMemoryInfoKHR + { + using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( + CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureToMemoryInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyAccelerationStructureToMemoryInfoKHR & + operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR & + operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == + sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CopyAccelerationStructureToMemoryInfoKHR; + }; + + struct CopyBufferInfo2 + { + using NativeType = VkCopyBufferInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & + setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & + setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * 
pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2 & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferInfo2 const & ) const = default; +#else + bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && + ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyBufferInfo2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = CopyBufferInfo2; + }; + using CopyBufferInfo2KHR = CopyBufferInfo2; + + struct CopyBufferToImageInfo2 + { + using NativeType = VkCopyBufferToImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferToImageInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & + setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & + setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & + setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2 & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyBufferToImageInfo2 
const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyBufferToImageInfo2 *>( this );
+    }
+
+    explicit operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyBufferToImageInfo2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Buffer const &,
+               VULKAN_HPP_NAMESPACE::Image const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
+#else
+    bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) &&
+             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) &&
+             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+#  endif
+    }
+
+    bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType          = StructureType::eCopyBufferToImageInfo2;
+    const void *                                   pNext          = {};
+    VULKAN_HPP_NAMESPACE::Buffer                   srcBuffer      = {};
+    VULKAN_HPP_NAMESPACE::Image                    dstImage       = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout              dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t                                       regionCount    = {};
+    const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions       = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) ==
+                              sizeof( VkCopyBufferToImageInfo2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value,
+                            "CopyBufferToImageInfo2 is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = CopyBufferToImageInfo2; + }; + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; + + struct CopyCommandTransformInfoQCOM + { + using NativeType = VkCopyCommandTransformInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT + : transform( transform_ ) + {} + + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; +#else + bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); +# endif + } + + bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == + sizeof( VkCopyCommandTransformInfoQCOM ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = CopyCommandTransformInfoQCOM; + }; + + struct CopyDescriptorSet + { + using NativeType = VkCopyDescriptorSet; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSet( srcSet_ ) + , srcBinding( srcBinding_ ) + , srcArrayElement( srcArrayElement_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & + setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT + { + srcSet = srcSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT + { + srcBinding = srcBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + srcArrayElement = srcArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & + setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyDescriptorSet const & ) const = default; +#else + bool 
operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && + ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) && + ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); +# endif + } + + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyDescriptorSet is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CopyDescriptorSet; + }; + + struct ImageCopy2 + { + using NativeType = VkImageCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCopy2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & + setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 
& + setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy2 const & ) const = default; +#else + bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCopy2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageCopy2; + }; + using ImageCopy2KHR = ImageCopy2; + + struct CopyImageInfo2 + { + using NativeType = VkCopyImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & + setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & + setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & + setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2 & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = 
static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageInfo2 const & ) const = default; +#else + bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageInfo2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = CopyImageInfo2; + }; + using CopyImageInfo2KHR = CopyImageInfo2; + + struct CopyImageToBufferInfo2 + { + using NativeType = VkCopyImageToBufferInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToBufferInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & + setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & + setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & + setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & + setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2 & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyImageToBufferInfo2 const 
&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyImageToBufferInfo2 *>( this );
+    }
+
+    explicit operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyImageToBufferInfo2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Image const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &,
+               VULKAN_HPP_NAMESPACE::Buffer const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
+#else
+    bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
+             ( srcImageLayout == rhs.srcImageLayout ) && ( dstBuffer == rhs.dstBuffer ) &&
+             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+#  endif
+    }
+
+    bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType          = StructureType::eCopyImageToBufferInfo2;
+    const void *                                   pNext          = {};
+    VULKAN_HPP_NAMESPACE::Image                    srcImage       = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout              srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer                   dstBuffer      = {};
+    uint32_t                                       regionCount    = {};
+    const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions       = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) ==
+                              sizeof( VkCopyImageToBufferInfo2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value,
+                            "CopyImageToBufferInfo2 is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = CopyImageToBufferInfo2; + }; + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + + struct CopyMemoryToAccelerationStructureInfoKHR + { + using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT + : src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( + CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : CopyMemoryToAccelerationStructureInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToAccelerationStructureInfoKHR & + operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR & + operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == + sizeof( 
VkCopyMemoryToAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CopyMemoryToAccelerationStructureInfoKHR; + }; + + struct CuFunctionCreateInfoNVX + { + using NativeType = VkCuFunctionCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, + const char * pName_ = {} ) VULKAN_HPP_NOEXCEPT + : module( module_ ) + , pName( pName_ ) + {} + + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & + setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, module, pName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); + } + + bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; + const char * pName = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == + sizeof( VkCuFunctionCreateInfoNVX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CuFunctionCreateInfoNVX; + }; + + struct CuLaunchInfoNVX + { + using NativeType = VkCuLaunchInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {} ) VULKAN_HPP_NOEXCEPT + : function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( paramCount_ ) + , pParams( pParams_ ) + , extraCount( extraCount_ ) + , pExtras( pExtras_ ) + {} + + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {} ) + : function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & + setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & + setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = params_.size(); + pParams = params_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & + setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extras_.size(); + pExtras = extras_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + function, + gridDimX, + gridDimY, + gridDimZ, + blockDimX, + blockDimY, + blockDimZ, + sharedMemBytes, + paramCount, + pParams, + extraCount, + pExtras ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuLaunchInfoNVX const & ) const = default; +#else + bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && + ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) && + ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && + ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras ); +# endif + } + + bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuLaunchInfoNVX is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = CuLaunchInfoNVX; + }; + + struct CuModuleCreateInfoNVX + { + using NativeType = VkCuModuleCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) + : dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX & + setData( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = data_.size() * sizeof( T );
+      pData    = data_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
+    }
+
+    explicit operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dataSize, pData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
+#else
+    bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
+#  endif
+    }
+
+    bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eCuModuleCreateInfoNVX;
+    const void *                        pNext    = {};
+    size_t                              dataSize = {};
+    const void *                        pData    = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value,
+                            "CuModuleCreateInfoNVX is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = CuModuleCreateInfoNVX; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct D3D12FenceSubmitInfoKHR + { + using NativeType = VkD3D12FenceSubmitInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + {} + + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & + setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = waitSemaphoreValuesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & + setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & + setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValuesCount = signalSemaphoreValuesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & + setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreValuesCount, + pWaitSemaphoreValues, + signalSemaphoreValuesCount, + pSignalSemaphoreValues ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; +# else + bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); +# endif + } + + bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreValuesCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValuesCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == + sizeof( VkD3D12FenceSubmitInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = D3D12FenceSubmitInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct DebugMarkerMarkerInfoEXT + { + using NativeType = VkDebugMarkerMarkerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT + : pMarkerName( pMarkerName_ ) + , color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerMarkerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT + { + pMarkerName = pMarkerName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pMarkerName, color ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pMarkerName != rhs.pMarkerName ) + if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = color <=> rhs.color; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) && + ( color == rhs.color ); + } + + bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + const void * pNext = {}; + const char * pMarkerName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == + sizeof( VkDebugMarkerMarkerInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DebugMarkerMarkerInfoEXT; + }; + + struct DebugMarkerObjectNameInfoEXT + { + using NativeType = VkDebugMarkerObjectNameInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , object( object_ ) + , pObjectName( pObjectName_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT + { + object = object_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & + setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT + { + pObjectName = pObjectName_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, object, pObjectName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) + return cmp; + if ( auto cmp = object <=> rhs.object; cmp != 0 ) + return cmp; + if ( pObjectName != rhs.pObjectName ) + if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && + ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); + } + + bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + const char * pObjectName = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == + sizeof( VkDebugMarkerObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DebugMarkerObjectNameInfoEXT; + }; + + struct DebugMarkerObjectTagInfoEXT + { + using NativeType = VkDebugMarkerObjectTagInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT + { + object = object_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT & + setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 
70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; +#else + bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); +# endif + } + + bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == + sizeof( VkDebugMarkerObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DebugMarkerObjectTagInfoEXT; + }; + + struct DebugReportCallbackCreateInfoEXT + { + using NativeType = VkDebugReportCallbackCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pfnCallback( pfnCallback_ ) + , pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugReportCallbackCreateInfoEXT & + operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & + setPfnCallback( 
PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
+    }
+
+    explicit operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT const &,
+               PFN_vkDebugReportCallbackEXT const &,
+               void * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pfnCallback, pUserData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
+#  endif
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType       = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void *                              pNext       = {};
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags       = {};
+    PFN_vkDebugReportCallbackEXT              pfnCallback = {};
+    void *                                    pUserData   = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) ==
+                              sizeof( VkDebugReportCallbackCreateInfoEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value,
+    "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = DebugReportCallbackCreateInfoEXT; + }; + + struct DebugUtilsLabelEXT + { + using NativeType = VkDebugUtilsLabelEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {}, + std::array const & color_ = {} ) VULKAN_HPP_NOEXCEPT + : pLabelName( pLabelName_ ) + , color( color_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT + { + pLabelName = pLabelName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pLabelName, color ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pLabelName != rhs.pLabelName ) + if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = color <=> rhs.color; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) && + ( color == rhs.color ); + } + + bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; + const void * pNext = {}; + const char * pLabelName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugUtilsLabelEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DebugUtilsLabelEXT; + }; + + struct DebugUtilsObjectNameInfoEXT + { + using NativeType = VkDebugUtilsObjectNameInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , pObjectName( pObjectName_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT + { + objectHandle = objectHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & + setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT + { + pObjectName = pObjectName_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, objectHandle, pObjectName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) + return cmp; + if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) + return cmp; + if ( pObjectName != rhs.pObjectName ) + if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && + ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); + } + + bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + const char * pObjectName = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == + sizeof( VkDebugUtilsObjectNameInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DebugUtilsObjectNameInfoEXT; + }; + + struct DebugUtilsMessengerCallbackDataEXT + { + using NativeType = VkDebugUtilsMessengerCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDebugUtilsMessengerCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, + const char * pMessageIdName_ = {}, + int32_t messageIdNumber_ = {}, + const char * pMessage_ = {}, + uint32_t queueLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, + uint32_t cmdBufLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, + uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( queueLabelCount_ ) + , pQueueLabels( pQueueLabels_ ) + , cmdBufLabelCount( cmdBufLabelCount_ ) + , pCmdBufLabels( pCmdBufLabels_ ) + , objectCount( objectCount_ ) + , pObjects( pObjects_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, + const char * pMessageIdName_, + int32_t messageIdNumber_, + const char * pMessage_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueLabels_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + cmdBufLabels_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + objects_ = {} ) + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ 
) + , queueLabelCount( static_cast( queueLabels_.size() ) ) + , pQueueLabels( queueLabels_.data() ) + , cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) + , pCmdBufLabels( cmdBufLabels_.data() ) + , objectCount( static_cast( objects_.size() ) ) + , pObjects( objects_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugUtilsMessengerCallbackDataEXT & + operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCallbackDataEXT & + operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT + { + pMessageIdName = pMessageIdName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT + { + messageIdNumber = messageIdNumber_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT + { + pMessage = pMessage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = queueLabelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT + { + pQueueLabels = pQueueLabels_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setQueueLabels( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueLabels_ ) VULKAN_HPP_NOEXCEPT + { + queueLabelCount = static_cast( queueLabels_.size() ); + pQueueLabels = queueLabels_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = cmdBufLabelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + pCmdBufLabels = pCmdBufLabels_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); + pCmdBufLabels = cmdBufLabels_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = objectCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPObjects( const 
VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT + { + pObjects = pObjects_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & setObjects( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + objects_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = static_cast( objects_.size() ); + pObjects = objects_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + pMessageIdName, + messageIdNumber, + pMessage, + queueLabelCount, + pQueueLabels, + cmdBufLabelCount, + pCmdBufLabels, + objectCount, + pObjects ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( pMessageIdName != rhs.pMessageIdName ) + if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) + return cmp; + if ( pMessage != rhs.pMessage ) + if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) + return cmp; + if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) + return cmp; + if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) + return cmp; + if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) + return cmp; + if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) && + ( messageIdNumber == rhs.messageIdNumber ) && + ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) && + ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && + ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && ( pCmdBufLabels == rhs.pCmdBufLabels ) && + ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects ); + } + + bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; + const char * pMessageIdName = {}; + int32_t messageIdNumber = {}; + const char * pMessage = {}; + uint32_t queueLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; + uint32_t cmdBufLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; + uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == + sizeof( VkDebugUtilsMessengerCallbackDataEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DebugUtilsMessengerCallbackDataEXT; + }; + + struct DebugUtilsMessengerCreateInfoEXT + { + using NativeType = VkDebugUtilsMessengerCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , messageSeverity( messageSeverity_ ) + , messageType( messageType_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugUtilsMessengerCreateInfoEXT & + operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT + { + messageSeverity = messageSeverity_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT + { + messageType = messageType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default; +#else + bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); +# endif + } + + bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == + sizeof( VkDebugUtilsMessengerCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DebugUtilsMessengerCreateInfoEXT; + }; + + struct DebugUtilsObjectTagInfoEXT + { + using NativeType = VkDebugUtilsObjectTagInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {} ) VULKAN_HPP_NOEXCEPT + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + {} + + VULKAN_HPP_CONSTEXPR + DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & + setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT + { + objectType = objectType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT + { + objectHandle = objectHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugUtilsObjectTagInfoEXT & + setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; +#else + bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && + ( pTag == rhs.pTag ); +# endif + } + + bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == + sizeof( VkDebugUtilsObjectTagInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" 
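As an illustrative aside, here is a minimal sketch of how the chained setters on the debug-utils wrapper above are typically used to describe a validation-layer callback. The callback body, the chosen severity and type bits, and the default vk namespace are assumptions made for the example, not part of the generated header.

    // Illustrative sketch only: the callback and the severity/type bits below are
    // assumptions; "vk" is the default VULKAN_HPP_NAMESPACE.
    #include <cstdio>
    #include <vulkan/vulkan.hpp>

    static VKAPI_ATTR VkBool32 VKAPI_CALL
    DebugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
                   VkDebugUtilsMessageTypeFlagsEXT /*types*/,
                   const VkDebugUtilsMessengerCallbackDataEXT* data, void* /*user_data*/ )
    {
      std::fprintf( stderr, "[vulkan] %s\n", data->pMessage ? data->pMessage : "" );
      return VK_FALSE;  // VK_FALSE: do not abort the call that triggered the message
    }

    vk::DebugUtilsMessengerCreateInfoEXT MakeMessengerCreateInfo()
    {
      // Each setter returns *this, so the struct can be filled in one expression.
      return vk::DebugUtilsMessengerCreateInfoEXT{}
          .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                               vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
          .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
                           vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
                           vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
          .setPfnUserCallback( DebugCallback );
    }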
); + + template <> + struct CppType + { + using Type = DebugUtilsObjectTagInfoEXT; + }; + + struct DedicatedAllocationBufferCreateInfoNV + { + using NativeType = VkDedicatedAllocationBufferCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationBufferCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) + VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationBufferCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DedicatedAllocationBufferCreateInfoNV & + operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV & + operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); +# endif + } + + bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == + sizeof( VkDedicatedAllocationBufferCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DedicatedAllocationBufferCreateInfoNV; + }; + + struct DedicatedAllocationImageCreateInfoNV + { + using NativeType = VkDedicatedAllocationImageCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : dedicatedAllocation( dedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DedicatedAllocationImageCreateInfoNV & + operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationImageCreateInfoNV & + operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); +# endif + } + + bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == + sizeof( VkDedicatedAllocationImageCreateInfoNV ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DedicatedAllocationImageCreateInfoNV; + }; + + struct DedicatedAllocationMemoryAllocateInfoNV + { + using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationMemoryAllocateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DedicatedAllocationMemoryAllocateInfoNV & + operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationMemoryAllocateInfoNV & + operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & + setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + const void * pNext = 
{}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == + sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DedicatedAllocationMemoryAllocateInfoNV; + }; + + struct MemoryBarrier2 + { + using NativeType = VkMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrier2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier2 const & ) const = default; +#else + bool operator==( MemoryBarrier2 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ); +# endif + } + + bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryBarrier2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryBarrier2; + }; + using MemoryBarrier2KHR = MemoryBarrier2; + + struct ImageSubresourceRange + { + using NativeType = VkImageSubresourceRange; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t baseMipLevel_ = {}, + uint32_t levelCount_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , baseMipLevel( baseMipLevel_ ) + , levelCount( levelCount_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & + setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT + { + baseMipLevel = baseMipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT + { + levelCount = levelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + explicit operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresourceRange const & ) const = default; +#else + bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && + ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresourceRange is not nothrow_move_constructible!" ); + + struct ImageMemoryBarrier2 + { + using NativeType = VkImageMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryBarrier2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return 
*this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + srcStageMask, + srcAccessMask, + dstStageMask, + dstAccessMask, + oldLayout, + newLayout, + srcQueueFamilyIndex, + dstQueueFamilyIndex, + image, + subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier2 const & ) const = default; +#else + bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == 
rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageMemoryBarrier2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier2; + }; + using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; + + struct DependencyInfo + { + using NativeType = VkDependencyInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + uint32_t memoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {}, + uint32_t bufferMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, + uint32_t imageMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {} ) VULKAN_HPP_NOEXCEPT + : dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( memoryBarrierCount_ ) + , pMemoryBarriers( pMemoryBarriers_ ) + , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ) + , pBufferMemoryBarriers( pBufferMemoryBarriers_ ) + , imageMemoryBarrierCount( imageMemoryBarrierCount_ ) + , pImageMemoryBarriers( pImageMemoryBarriers_ ) + {} + + VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DependencyInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo( + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferMemoryBarriers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageMemoryBarriers_ = {} ) + : dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) + , pMemoryBarriers( memoryBarriers_.data() ) + , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) + , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) + , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) + , 
pImageMemoryBarriers( imageMemoryBarriers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = memoryBarrierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pMemoryBarriers = pMemoryBarriers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + memoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + memoryBarrierCount = static_cast( memoryBarriers_.size() ); + pMemoryBarriers = memoryBarriers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = bufferMemoryBarrierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers( + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pBufferMemoryBarriers = pBufferMemoryBarriers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setBufferMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = static_cast( bufferMemoryBarriers_.size() ); + pBufferMemoryBarriers = bufferMemoryBarriers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + { + imageMemoryBarrierCount = imageMemoryBarrierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers( + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + pImageMemoryBarriers = pImageMemoryBarriers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setImageMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + imageMemoryBarrierCount = static_cast( imageMemoryBarriers_.size() ); + pImageMemoryBarriers = imageMemoryBarriers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION 
) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DependencyInfo const & ) const = default; +#else + bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) && + ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && + ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) && + ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && + ( pImageMemoryBarriers == rhs.pImageMemoryBarriers ); +# endif + } + + bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + uint32_t memoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {}; + uint32_t bufferMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {}; + uint32_t imageMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DependencyInfo is not nothrow_move_constructible!" 
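As another illustrative aside, a minimal sketch of how the synchronization2 wrappers above (ImageMemoryBarrier2, ImageSubresourceRange, DependencyInfo) fit together when recording a layout transition. The stage and access masks, the layouts, and the use of vk::CommandBuffer::pipelineBarrier2 from the same header are assumptions chosen for the example.

    // Illustration only: masks, layouts and the pipelineBarrier2 wrapper are example
    // assumptions; the command buffer and image come from the caller.
    #include <vulkan/vulkan.hpp>

    void TransitionToTransferDst( vk::CommandBuffer cmd, vk::Image image )
    {
      const vk::ImageMemoryBarrier2 barrier =
          vk::ImageMemoryBarrier2{}
              .setSrcStageMask( vk::PipelineStageFlagBits2::eTopOfPipe )
              .setDstStageMask( vk::PipelineStageFlagBits2::eTransfer )
              .setDstAccessMask( vk::AccessFlagBits2::eTransferWrite )
              .setOldLayout( vk::ImageLayout::eUndefined )
              .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
              .setImage( image )
              .setSubresourceRange( vk::ImageSubresourceRange{}
                                        .setAspectMask( vk::ImageAspectFlagBits::eColor )
                                        .setLevelCount( 1 )
                                        .setLayerCount( 1 ) );

      // The ArrayProxyNoTemporaries setter accepts a single named barrier.
      const vk::DependencyInfo dep = vk::DependencyInfo{}.setImageMemoryBarriers( barrier );
      cmd.pipelineBarrier2( dep );  // assumes the matching command-buffer wrapper in this header
    }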
); + + template <> + struct CppType + { + using Type = DependencyInfo; + }; + using DependencyInfoKHR = DependencyInfo; + + struct DescriptorBufferInfo + { + using NativeType = VkDescriptorBufferInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + , range( range_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & + setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + { + range = range_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, offset, range ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorBufferInfo const & ) const = default; +#else + bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range ); +# endif + } + + bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorBufferInfo is not nothrow_move_constructible!" 
); + + struct DescriptorImageInfo + { + using NativeType = VkDescriptorImageInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = + VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : sampler( sampler_ ) + , imageView( imageView_ ) + , imageLayout( imageLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorImageInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & + setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & + setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & + setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sampler, imageView, imageLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorImageInfo const & ) const = default; +#else + bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); +# endif + } + + bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorImageInfo is not nothrow_move_constructible!" 
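A brief illustrative note on the two plain (non-sType) descriptor structs above: they are usually filled directly through their constructors. The whole-buffer range and the shader-read-only layout below are only the common choices, assumed for the example.

    // Illustration only: VK_WHOLE_SIZE and eShaderReadOnlyOptimal are typical values,
    // not requirements; the handles come from the caller.
    #include <vulkan/vulkan.hpp>

    vk::DescriptorBufferInfo DescribeWholeBuffer( vk::Buffer buffer )
    {
      return vk::DescriptorBufferInfo( buffer, 0, VK_WHOLE_SIZE );
    }

    vk::DescriptorImageInfo DescribeSampledImage( vk::Sampler sampler, vk::ImageView view )
    {
      return vk::DescriptorImageInfo( sampler, view, vk::ImageLayout::eShaderReadOnlyOptimal );
    }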
); + + struct DescriptorPoolSize + { + using NativeType = VkDescriptorPoolSize; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , descriptorCount( descriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolSize( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & + setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, descriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolSize const & ) const = default; +#else + bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount ); +# endif + } + + bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPoolSize is not nothrow_move_constructible!" 
); + + struct DescriptorPoolCreateInfo + { + using NativeType = VkDescriptorPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, + uint32_t maxSets_ = {}, + uint32_t poolSizeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( poolSizeCount_ ) + , pPoolSizes( pPoolSizes_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, + uint32_t maxSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( static_cast( poolSizes_.size() ) ) + , pPoolSizes( poolSizes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT + { + maxSets = maxSets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = poolSizeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & + setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + { + pPoolSizes = pPoolSizes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo & setPoolSizes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) + VULKAN_HPP_NOEXCEPT + { + poolSizeCount = static_cast( poolSizes_.size() ); + pPoolSizes = poolSizes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( DescriptorPoolCreateInfo const & ) const = default; +#else + bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && + ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes ); +# endif + } + + bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; + uint32_t maxSets = {}; + uint32_t poolSizeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == + sizeof( VkDescriptorPoolCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorPoolCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DescriptorPoolCreateInfo; + }; + + struct DescriptorPoolInlineUniformBlockCreateInfo + { + using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( + DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorPoolInlineUniformBlockCreateInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorPoolInlineUniformBlockCreateInfo & + operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfo & + operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + { + maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); 
+ } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxInlineUniformBlockBindings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default; +#else + bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); +# endif + } + + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == + sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DescriptorPoolInlineUniformBlockCreateInfo; + }; + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + + struct DescriptorSetAllocateInfo + { + using NativeType = VkDescriptorSetAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pSetLayouts( pSetLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo( + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & + setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + { + descriptorPool = descriptorPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & + setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo & setSetLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts ); +# endif + } + + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == + sizeof( VkDescriptorSetAllocateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetAllocateInfo is not nothrow_move_constructible!" 
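+    // A minimal usage sketch (illustrative only; `device`, `pool` and `layout` are assumed pre-existing
+    // handles, not names defined in this header):
+    //   std::array<vk::DescriptorSetLayout, 1> layouts   = { layout };
+    //   vk::DescriptorSetAllocateInfo          allocInfo( pool, layouts );
+    //   std::vector<vk::DescriptorSet>         sets      = device.allocateDescriptorSets( allocInfo );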
); + + template <> + struct CppType + { + using Type = DescriptorSetAllocateInfo; + }; + + struct DescriptorSetLayoutBinding + { + using NativeType = VkDescriptorSetLayoutBinding; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( descriptorCount_ ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( pImmutableSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding( + uint32_t binding_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( static_cast( immutableSamplers_.size() ) ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( immutableSamplers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & + setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & + setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & + setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { + pImmutableSamplers = pImmutableSamplers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding & setImmutableSamplers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit 
operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && + ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) && + ( pImmutableSamplers == rhs.pImmutableSamplers ); +# endif + } + + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == + sizeof( VkDescriptorSetLayoutBinding ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutBinding is not nothrow_move_constructible!" 
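+    // A minimal usage sketch (illustrative only): a single uniform-buffer binding at slot 0, visible to the
+    // vertex stage, built with the constructor above:
+    //   vk::DescriptorSetLayoutBinding uboBinding( 0, vk::DescriptorType::eUniformBuffer, 1,
+    //                                              vk::ShaderStageFlagBits::eVertex );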
); + + struct DescriptorSetLayoutBindingFlagsCreateInfo + { + using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : bindingCount( bindingCount_ ) + , pBindingFlags( pBindingFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( + DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBindingFlagsCreateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindingFlags_ ) + : bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo & + operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + pBindingFlags = pBindingFlags_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bindingCount, pBindingFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && + ( pBindingFlags == rhs.pBindingFlags ); +# endif + } + + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == + sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DescriptorSetLayoutBindingFlagsCreateInfo; + }; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + + struct DescriptorSetLayoutCreateInfo + { + using NativeType = VkDescriptorSetLayoutCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , bindingCount( bindingCount_ ) + , pBindings( pBindings_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindings_ ) + : flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutCreateInfo & + operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; 
+ return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT + { + pBindings = pBindings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo & setBindings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bindings_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, bindingCount, pBindings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings ); +# endif + } + + bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == + sizeof( VkDescriptorSetLayoutCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" 
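+    // A minimal usage sketch (illustrative only; `device` and a `bindings` container are assumed):
+    //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );  // enhanced-mode ArrayProxy constructor
+    //   vk::DescriptorSetLayout           layout = device.createDescriptorSetLayout( layoutInfo );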
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
+  {
+    using Type = DescriptorSetLayoutCreateInfo;
+  };
+
+  struct DescriptorSetLayoutSupport
+  {
+    using NativeType = VkDescriptorSetLayoutSupport;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
+      : supported( supported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this );
+    }
+
+    explicit operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutSupport *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, supported );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported );
+#  endif
+    }
+
+    bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eDescriptorSetLayoutSupport;
+    void *                              pNext     = {};
+    VULKAN_HPP_NAMESPACE::Bool32        supported = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) ==
+                              sizeof( VkDescriptorSetLayoutSupport ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value,
+                            "DescriptorSetLayoutSupport is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = DescriptorSetLayoutSupport; + }; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + struct DescriptorSetVariableDescriptorCountAllocateInfo + { + using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorSetCount( descriptorSetCount_ ) + , pDescriptorCounts( pDescriptorCounts_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( + DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountAllocateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) + : descriptorSetCount( static_cast( descriptorCounts_.size() ) ) + , pDescriptorCounts( descriptorCounts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo & + operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & + setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorCounts = pDescriptorCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == rhs.pDescriptorCounts ); +# endif + } + + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) == + sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + }; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + + struct DescriptorSetVariableDescriptorCountLayoutSupport + { + using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : maxVariableDescriptorCount( maxVariableDescriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( + DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) + VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountLayoutSupport( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVariableDescriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); +# endif + } + + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) == + sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + }; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + + struct DescriptorUpdateTemplateEntry + { + using NativeType = VkDescriptorUpdateTemplateEntry; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , offset( offset_ ) + , stride( stride_ ) + {} + + VULKAN_HPP_CONSTEXPR + DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorUpdateTemplateEntry & + operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & + setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & + setDescriptorCount( uint32_t 
descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( offset == rhs.offset ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + size_t stride = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == + sizeof( VkDescriptorUpdateTemplateEntry ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" 
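+    // A minimal usage sketch (illustrative only): one template entry that copies a vk::DescriptorBufferInfo
+    // from a hypothetical host-side struct `HostData` (member `ubo`) into binding 0:
+    //   vk::DescriptorUpdateTemplateEntry entry( 0, 0, 1, vk::DescriptorType::eUniformBuffer,
+    //                                            offsetof( HostData, ubo ), sizeof( vk::DescriptorBufferInfo ) );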
); + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + + struct DescriptorUpdateTemplateCreateInfo + { + using NativeType = VkDescriptorUpdateTemplateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDescriptorUpdateTemplateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) + , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + {} + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorUpdateEntries_, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {} ) + : flags( flags_ ) + , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) + , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorUpdateTemplateCreateInfo & + operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo & + operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags 
= flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT + { + templateType = templateType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + descriptorUpdateEntryCount, + pDescriptorUpdateEntries, + templateType, + descriptorSetLayout, + pipelineBindPoint, + pipelineLayout, + set ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set ); +# endif + } + + bool 
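+    // A minimal usage sketch (illustrative only; `device`, `entries` and `setLayout` are assumed):
+    //   vk::DescriptorUpdateTemplateCreateInfo tmplInfo( {}, entries,
+    //                                                    vk::DescriptorUpdateTemplateType::eDescriptorSet, setLayout );
+    //   vk::DescriptorUpdateTemplate tmpl = device.createDescriptorUpdateTemplate( tmplInfo );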
operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == + sizeof( VkDescriptorUpdateTemplateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DescriptorUpdateTemplateCreateInfo; + }; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + struct DeviceBufferMemoryRequirements + { + using NativeType = VkDeviceBufferMemoryRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( + const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pCreateInfo( pCreateInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceBufferMemoryRequirements( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceBufferMemoryRequirements & + operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & + setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, pCreateInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default; +#else + bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); +# endif + } + + bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == + sizeof( VkDeviceBufferMemoryRequirements ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceBufferMemoryRequirements; + }; + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + + struct DeviceQueueCreateInfo + { + using NativeType = VkDeviceQueueCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( queueCount_ ) + , pQueuePriorities( pQueuePriorities_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( static_cast( queuePriorities_.size() ) ) + , pQueuePriorities( queuePriorities_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
DeviceQueueCreateInfo & + setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCount = queueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & + setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT + { + pQueuePriorities = pQueuePriorities_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo & setQueuePriorities( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT + { + queueCount = static_cast( queuePriorities_.size() ); + pQueuePriorities = queuePriorities_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueCreateInfo const & ) const = default; +#else + bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) && + ( pQueuePriorities == rhs.pQueuePriorities ); +# endif + } + + bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueCount = {}; + const float * pQueuePriorities = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueCreateInfo is not nothrow_move_constructible!" 
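+    // A minimal usage sketch (illustrative only; `graphicsFamily` is an assumed queue family index):
+    //   const float               priority = 1.0f;
+    //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsFamily, 1, &priority );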
); + + template <> + struct CppType + { + using Type = DeviceQueueCreateInfo; + }; + + struct PhysicalDeviceFeatures + { + using NativeType = VkPhysicalDeviceFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess( robustBufferAccess_ ) + , fullDrawIndexUint32( fullDrawIndexUint32_ ) + , imageCubeArray( imageCubeArray_ ) + , independentBlend( independentBlend_ ) + , geometryShader( geometryShader_ ) + , tessellationShader( tessellationShader_ ) + , sampleRateShading( sampleRateShading_ ) + , dualSrcBlend( dualSrcBlend_ ) + , logicOp( logicOp_ ) + , multiDrawIndirect( multiDrawIndirect_ ) + , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) + , depthClamp( depthClamp_ ) + , depthBiasClamp( depthBiasClamp_ ) + , fillModeNonSolid( fillModeNonSolid_ ) + , depthBounds( depthBounds_ ) + , wideLines( wideLines_ ) + , largePoints( largePoints_ ) + , alphaToOne( alphaToOne_ ) + , multiViewport( multiViewport_ ) + , samplerAnisotropy( samplerAnisotropy_ ) + , textureCompressionETC2( textureCompressionETC2_ ) + , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) + , textureCompressionBC( textureCompressionBC_ ) + , occlusionQueryPrecise( occlusionQueryPrecise_ ) + , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) + , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) + , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) + , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) + , shaderImageGatherExtended( shaderImageGatherExtended_ ) + , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) + , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) + , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) + , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) + , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) + , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) + , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) + , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) + , shaderClipDistance( shaderClipDistance_ ) + , shaderCullDistance( shaderCullDistance_ ) + , shaderFloat64( shaderFloat64_ ) + , shaderInt64( shaderInt64_ ) + , shaderInt16( shaderInt16_ ) + , shaderResourceResidency( shaderResourceResidency_ ) + , shaderResourceMinLod( shaderResourceMinLod_ ) + , sparseBinding( sparseBinding_ ) + , sparseResidencyBuffer( sparseResidencyBuffer_ ) + , sparseResidencyImage2D( sparseResidencyImage2D_ ) + , sparseResidencyImage3D( sparseResidencyImage3D_ ) + , sparseResidency2Samples( sparseResidency2Samples_ ) + , sparseResidency4Samples( sparseResidency4Samples_ ) + , sparseResidency8Samples( sparseResidency8Samples_ ) + , sparseResidency16Samples( sparseResidency16Samples_ ) + , sparseResidencyAliased( sparseResidencyAliased_ ) + , variableMultisampleRate( variableMultisampleRate_ ) + , inheritedQueries( inheritedQueries_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFeatures & + setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess = robustBufferAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + { + fullDrawIndexUint32 = fullDrawIndexUint32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT + { + imageCubeArray = imageCubeArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT + { + independentBlend = independentBlend_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT + { + geometryShader = geometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + tessellationShader = tessellationShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT + { + sampleRateShading = sampleRateShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT + { + dualSrcBlend = dualSrcBlend_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT + { + multiDrawIndirect = multiDrawIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT + { + drawIndirectFirstInstance = drawIndirectFirstInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthClamp = depthClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT + { + fillModeNonSolid = fillModeNonSolid_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT + { + depthBounds = depthBounds_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT + { + wideLines = wideLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT + { + largePoints = largePoints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) 
VULKAN_HPP_NOEXCEPT + { + alphaToOne = alphaToOne_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT + { + multiViewport = multiViewport_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt64 = shaderInt64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt16 = shaderInt16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceResidency = shaderResourceResidency_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT + { + sparseBinding = sparseBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage2D = 
sparseResidencyImage2D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency4Samples = sparseResidency4Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency8Samples = sparseResidency8Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency16Samples = sparseResidency16Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyAliased = sparseResidencyAliased_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + { + variableMultisampleRate = variableMultisampleRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT + { + inheritedQueries = inheritedQueries_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( robustBufferAccess, + fullDrawIndexUint32, + imageCubeArray, + independentBlend, + geometryShader, + tessellationShader, + sampleRateShading, + dualSrcBlend, + logicOp, + multiDrawIndirect, + drawIndirectFirstInstance, + depthClamp, + depthBiasClamp, + fillModeNonSolid, + depthBounds, + wideLines, + largePoints, + alphaToOne, + multiViewport, + samplerAnisotropy, + textureCompressionETC2, + textureCompressionASTC_LDR, + textureCompressionBC, + occlusionQueryPrecise, + pipelineStatisticsQuery, + vertexPipelineStoresAndAtomics, + fragmentStoresAndAtomics, + shaderTessellationAndGeometryPointSize, + shaderImageGatherExtended, + shaderStorageImageExtendedFormats, + shaderStorageImageMultisample, + shaderStorageImageReadWithoutFormat, + shaderStorageImageWriteWithoutFormat, + shaderUniformBufferArrayDynamicIndexing, + shaderSampledImageArrayDynamicIndexing, + shaderStorageBufferArrayDynamicIndexing, + shaderStorageImageArrayDynamicIndexing, + shaderClipDistance, + shaderCullDistance, + shaderFloat64, + shaderInt64, + shaderInt16, + shaderResourceResidency, + shaderResourceMinLod, + sparseBinding, + sparseResidencyBuffer, + sparseResidencyImage2D, + 
sparseResidencyImage3D,
+                       sparseResidency2Samples,
+                       sparseResidency4Samples,
+                       sparseResidency8Samples,
+                       sparseResidency16Samples,
+                       sparseResidencyAliased,
+                       variableMultisampleRate,
+                       inheritedQueries );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) &&
+             ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) &&
+             ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) &&
+             ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) &&
+             ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) &&
+             ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) &&
+             ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) &&
+             ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) &&
+             ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) &&
+             ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) &&
+             ( textureCompressionETC2 == rhs.textureCompressionETC2 ) &&
+             ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) &&
+             ( textureCompressionBC == rhs.textureCompressionBC ) &&
+             ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) &&
+             ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) &&
+             ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) &&
+             ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) &&
+             ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) &&
+             ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) &&
+             ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) &&
+             ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) &&
+             ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) &&
+             ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) &&
+             ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) &&
+             ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) &&
+             ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) &&
+             ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) &&
+             ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) &&
+             ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) &&
+             ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) &&
+             ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) &&
+             ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) &&
+             ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) &&
+             ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) &&
+             ( sparseResidency2Samples == rhs.sparseResidency2Samples ) &&
+             ( sparseResidency4Samples == rhs.sparseResidency4Samples ) &&
+             ( sparseResidency8Samples ==
rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && + ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == + sizeof( VkPhysicalDeviceFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFeatures is not nothrow_move_constructible!" ); + + struct DeviceCreateInfo + { + using NativeType = VkDeviceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queueCreateInfoCount( queueCreateInfoCount_ ) + , pQueueCreateInfos( pQueueCreateInfos_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + , pEnabledFeatures( pEnabledFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo( + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueCreateInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} ) + : flags( flags_ ) + , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) + , pQueueCreateInfos( queueCreateInfos_.data() ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + , pEnabledFeatures( pEnabledFeatures_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + 
setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = queueCreateInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + pQueueCreateInfos = pQueueCreateInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setQueueCreateInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setPEnabledExtensionNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledFeatures = pEnabledFeatures_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + queueCreateInfoCount, + pQueueCreateInfos, + enabledLayerCount, + ppEnabledLayerNames, + enabledExtensionCount, + ppEnabledExtensionNames, + pEnabledFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags 
<=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && + ( enabledLayerCount == rhs.enabledLayerCount ) && + [this, rhs] + { + bool equal = true; + for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i ) + { + equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || + ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) ); + } + return equal; + }() && ( enabledExtensionCount == rhs.enabledExtensionCount ) && + [this, rhs] + { + bool equal = true; + for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i ) + { + equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) || + ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) ); + } + return equal; + }() && ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DeviceCreateInfo; + }; + + struct DeviceDeviceMemoryReportCreateInfoEXT + { + using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceDeviceMemoryReportCreateInfoEXT & + operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT & + operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pfnUserCallback, pUserData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const & ) const = default; +#else + bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); +# endif + } + + bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == + sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceDeviceMemoryReportCreateInfoEXT; + }; + + struct DeviceDiagnosticsConfigCreateInfoNV + { + using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceDiagnosticsConfigCreateInfoNV & + operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV & + operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; +#else + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( 
rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) ==
+                              sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
+    "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
+  {
+    using Type = DeviceDiagnosticsConfigCreateInfoNV;
+  };
+
+  struct DeviceEventInfoEXT
+  {
+    using NativeType = VkDeviceEventInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ =
+                            VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
+      : deviceEvent( deviceEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT &
+      setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
+    }
+
+    explicit operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceEvent );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceEventInfoEXT const & ) const = default;
+#else
+    bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
+#  endif
+    }
+
+    bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof(
VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceEventInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceEventInfoEXT; + }; + + struct DeviceGroupBindSparseInfo + { + using NativeType = VkDeviceGroupBindSparseInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, + uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceDeviceIndex( resourceDeviceIndex_ ) + , memoryDeviceIndex( memoryDeviceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupBindSparseInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & + setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & + setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; +#else + bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && + ( memoryDeviceIndex == rhs.memoryDeviceIndex ); +# endif + } + + bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void * pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t 
memoryDeviceIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == + sizeof( VkDeviceGroupBindSparseInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceGroupBindSparseInfo; + }; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + + struct DeviceGroupCommandBufferBeginInfo + { + using NativeType = VkDeviceGroupCommandBufferBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupCommandBufferBeginInfo & + operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & + setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == + sizeof( 
VkDeviceGroupCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceGroupCommandBufferBeginInfo; + }; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + struct DeviceGroupDeviceCreateInfo + { + using NativeType = VkDeviceGroupDeviceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( + uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , pPhysicalDevices( pPhysicalDevices_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + physicalDevices_ ) + : physicalDeviceCount( static_cast( physicalDevices_.size() ) ) + , pPhysicalDevices( physicalDevices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & + setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & + setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo & setPhysicalDevices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + physicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = static_cast( physicalDevices_.size() ); + pPhysicalDevices = physicalDevices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
sType, pNext, physicalDeviceCount, pPhysicalDevices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; +#else + bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( pPhysicalDevices == rhs.pPhysicalDevices ); +# endif + } + + bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void * pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == + sizeof( VkDeviceGroupDeviceCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceGroupDeviceCreateInfo; + }; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + + struct DeviceGroupPresentCapabilitiesKHR + { + using NativeType = VkDeviceGroupPresentCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceGroupPresentCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( + std::array const & presentMask_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT + : presentMask( presentMask_ ) + , modes( modes_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupPresentCapabilitiesKHR & + operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentMask, modes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; +#else + bool operator==( 
DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && + ( modes == rhs.modes ); +# endif + } + + bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == + sizeof( VkDeviceGroupPresentCapabilitiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceGroupPresentCapabilitiesKHR; + }; + + struct DeviceGroupPresentInfoKHR + { + using NativeType = VkDeviceGroupPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, + const uint32_t * pDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pDeviceMasks( pDeviceMasks_ ) + , mode( mode_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) + : swapchainCount( static_cast( deviceMasks_.size() ) ) + , pDeviceMasks( deviceMasks_.data() ) + , mode( mode_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & + setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & + setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceMasks = pDeviceMasks_; + return *this; + 
} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR & setDeviceMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( deviceMasks_.size() ); + pDeviceMasks = deviceMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode ); +# endif + } + + bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t * pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == + sizeof( VkDeviceGroupPresentInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DeviceGroupPresentInfoKHR; + }; + + struct DeviceGroupRenderPassBeginInfo + { + using NativeType = VkDeviceGroupRenderPassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {} ) + VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( deviceRenderAreaCount_ ) + , pDeviceRenderAreas( pDeviceRenderAreas_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo( + uint32_t deviceMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) + , pDeviceRenderAreas( deviceRenderAreas_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupRenderPassBeginInfo & + operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & + setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & + setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) + VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); + pDeviceRenderAreas = deviceRenderAreas_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, 
pDeviceRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; +#else + bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && + ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); +# endif + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == + sizeof( VkDeviceGroupRenderPassBeginInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceGroupRenderPassBeginInfo; + }; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupSubmitInfo + { + using NativeType = VkDeviceGroupSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, + uint32_t commandBufferCount_ = {}, + const uint32_t * pCommandBufferDeviceMasks_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const uint32_t * pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) + , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) + , commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) + , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) + , 
pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); + pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) + VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); + pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & + setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); + pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphoreDeviceIndices, + commandBufferCount, + pCommandBufferDeviceMasks, + signalSemaphoreCount, + pSignalSemaphoreDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; +#else + bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && + ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); +# endif + } + + bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t * pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t * pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t * pSignalSemaphoreDeviceIndices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupSubmitInfo is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
+  {
+    using Type = DeviceGroupSubmitInfo;
+  };
+  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
+
+  struct DeviceGroupSwapchainCreateInfoKHR
+  {
+    using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(
+      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupSwapchainCreateInfoKHR &
+      operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR &
+      setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      modes = modes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
+    }
+
+    explicit operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, modes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes );
+# endif
+    }
+
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) ==
+                              sizeof( VkDeviceGroupSwapchainCreateInfoKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value,
+    "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = DeviceGroupSwapchainCreateInfoKHR; + }; + + struct ImageCreateInfo + { + using NativeType = VkImageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCreateInfo( + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + uint32_t mipLevels_ = {}, + uint32_t arrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , initialLayout( initialLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageType imageType_, + VULKAN_HPP_NAMESPACE::Format format_, + VULKAN_HPP_NAMESPACE::Extent3D extent_, + uint32_t mipLevels_, + uint32_t arrayLayers_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + : flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , initialLayout( initialLayout_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
ImageCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT + { + imageType = imageType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT + { + mipLevels = mipLevels_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayers = arrayLayers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & + setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + imageType, + format, + extent, + mipLevels, + arrayLayers, + samples, + tiling, + usage, + sharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + initialLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCreateInfo const & ) const = default; +#else + bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) && + ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout ); +# endif + } + + bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + uint32_t mipLevels = {}; + uint32_t arrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageCreateInfo; + }; + + struct DeviceImageMemoryRequirements + { + using NativeType = VkDeviceImageMemoryRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + : pCreateInfo( pCreateInfo_ ) + , planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceImageMemoryRequirements & + operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & + setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceImageMemoryRequirements const & ) const = default; +#else + bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && + ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == + 
sizeof( VkDeviceImageMemoryRequirements ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceImageMemoryRequirements is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceImageMemoryRequirements; + }; + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + struct DeviceMemoryOpaqueCaptureAddressInfo + { + using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryOpaqueCaptureAddressInfo & + operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo & + operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; +#else + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == + sizeof( 
VkDeviceMemoryOpaqueCaptureAddressInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceMemoryOpaqueCaptureAddressInfo; + }; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + struct DeviceMemoryOverallocationCreateInfoAMD + { + using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT + : overallocationBehavior( overallocationBehavior_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOverallocationCreateInfoAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryOverallocationCreateInfoAMD & + operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD & + operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + { + overallocationBehavior = overallocationBehavior_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, overallocationBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; +#else + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( overallocationBehavior == rhs.overallocationBehavior ); +# endif + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == + sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceMemoryOverallocationCreateInfoAMD; + }; + + struct DeviceMemoryReportCallbackDataEXT + { + using NativeType = VkDeviceMemoryReportCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceMemoryReportCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , type( type_ ) + , memoryObjectId( memoryObjectId_ ) + , size( size_ ) + , objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , heapIndex( heapIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryReportCallbackDataEXT & + operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && + ( memoryObjectId == rhs.memoryObjectId ) && ( size == rhs.size ) && ( objectType == rhs.objectType ) && + ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); +# endif + } + + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == + sizeof( VkDeviceMemoryReportCallbackDataEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceMemoryReportCallbackDataEXT; + }; + + struct DevicePrivateDataCreateInfo + { + using NativeType = VkDevicePrivateDataCreateInfo; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {} ) VULKAN_HPP_NOEXCEPT + : privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePrivateDataCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & + setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT + { + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
privateDataSlotRequestCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default; +#else + bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); +# endif + } + + bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo; + const void * pNext = {}; + uint32_t privateDataSlotRequestCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == + sizeof( VkDevicePrivateDataCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DevicePrivateDataCreateInfo; + }; + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + + struct DeviceQueueGlobalPriorityCreateInfoKHR + { + using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow ) VULKAN_HPP_NOEXCEPT + : globalPriority( globalPriority_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceQueueGlobalPriorityCreateInfoKHR & + operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoKHR & + operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT + { + globalPriority = globalPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, globalPriority );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceQueueGlobalPriorityCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority );
+# endif
+    }
+
+    bool operator!=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow;
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) ==
+                              sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
+    "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR>
+  {
+    using Type = DeviceQueueGlobalPriorityCreateInfoKHR;
+  };
+  using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR;
+
+  struct DeviceQueueInfo2
+  {
+    using NativeType = VkDeviceQueueInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
+                                           uint32_t queueFamilyIndex_ = {},
+                                           uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , queueIndex( queueIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 &
+      setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueIndex = queueIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator
VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueInfo2 const & ) const = default; +#else + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex ); +# endif + } + + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueInfo2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DeviceQueueInfo2; + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + struct DirectFBSurfaceCreateInfoEXT + { + using NativeType = VkDirectFBSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, + IDirectFB * dfb_ = {}, + IDirectFBSurface * surface_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dfb( dfb_ ) + , surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT + { + 
dfb = dfb_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dfb, surface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && + ( surface == rhs.surface ); +# endif + } + + bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; + IDirectFB * dfb = {}; + IDirectFBSurface * surface = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == + sizeof( VkDirectFBSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
+  {
+    using Type = DirectFBSurfaceCreateInfoEXT;
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  struct DispatchIndirectCommand
+  {
+    using NativeType = VkDispatchIndirectCommand;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
+    }
+
+    explicit operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DispatchIndirectCommand const & ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
+#  endif
+    }
+
+    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) ==
+                              sizeof( VkDispatchIndirectCommand ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value,
+                            "DispatchIndirectCommand is not nothrow_move_constructible!"
); + + struct DisplayEventInfoEXT + { + using NativeType = VkDisplayEventInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT + : displayEvent( displayEvent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayEventInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & + setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT + { + displayEvent = displayEvent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayEvent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayEventInfoEXT const & ) const = default; +#else + bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); +# endif + } + + bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayEventInfoEXT is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
+  {
+    using Type = DisplayEventInfoEXT;
+  };
+
+  struct DisplayModeParametersKHR
+  {
+    using NativeType = VkDisplayModeParametersKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {},
+                                                   uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
+      : visibleRegion( visibleRegion_ )
+      , refreshRate( refreshRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR &
+      setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
+    }
+
+    explicit operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeParametersKHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( visibleRegion, refreshRate );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeParametersKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate );
+#  endif
+    }
+
+    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+    uint32_t                       refreshRate   = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) ==
+                              sizeof( VkDisplayModeParametersKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value,
+                            "DisplayModeParametersKHR is not nothrow_move_constructible!"
); + + struct DisplayModeCreateInfoKHR + { + using NativeType = VkDisplayModeCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & + setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + { + parameters = parameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, parameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( parameters == rhs.parameters ); +# endif + } + + bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == + sizeof( VkDisplayModeCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayModeCreateInfoKHR; + }; + + struct DisplayModePropertiesKHR + { + using NativeType = VkDisplayModePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : displayMode( displayMode_ ) + , parameters( parameters_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( displayMode, parameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); +# endif + } + + bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == + sizeof( VkDisplayModePropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModePropertiesKHR is not nothrow_move_constructible!" 
);
+
+  struct DisplayModeProperties2KHR
+  {
+    using NativeType = VkDisplayModeProperties2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(
+      VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
+    }
+
+    explicit operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayModeProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( displayModeProperties == rhs.displayModeProperties );
+#  endif
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
+    void *                                         pNext                 = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) ==
+                              sizeof( VkDisplayModeProperties2KHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
+                            "DisplayModeProperties2KHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = DisplayModeProperties2KHR; + }; + + struct DisplayNativeHdrSurfaceCapabilitiesAMD + { + using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT + : localDimmingSupport( localDimmingSupport_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayNativeHdrSurfaceCapabilitiesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayNativeHdrSurfaceCapabilitiesAMD & + operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, localDimmingSupport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; +#else + bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); +# endif + } + + bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == + sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + }; + + struct DisplayPlaneCapabilitiesKHR + { + using NativeType = VkDisplayPlaneCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedAlpha( supportedAlpha_ ) + , minSrcPosition( minSrcPosition_ ) + , maxSrcPosition( maxSrcPosition_ ) + , minSrcExtent( minSrcExtent_ ) + , maxSrcExtent( maxSrcExtent_ ) + , minDstPosition( minDstPosition_ ) + , maxDstPosition( maxDstPosition_ ) + , minDstExtent( minDstExtent_ ) + , maxDstExtent( maxDstExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( supportedAlpha, + minSrcPosition, + maxSrcPosition, + minSrcExtent, + maxSrcExtent, + minDstPosition, + maxDstPosition, + minDstExtent, + maxDstExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && + ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) && + ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && + ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && + ( maxDstExtent == rhs.maxDstExtent ); +# endif + } + + bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; 
+ VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == + sizeof( VkDisplayPlaneCapabilitiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" ); + + struct DisplayPlaneCapabilities2KHR + { + using NativeType = VkDisplayPlaneCapabilities2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT + : capabilities( capabilities_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, capabilities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); +# endif + } + + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == + sizeof( VkDisplayPlaneCapabilities2KHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneCapabilities2KHR; + }; + + struct DisplayPlaneInfo2KHR + { + using NativeType = VkDisplayPlaneInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, + uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : mode( mode_ ) + , planeIndex( planeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & + setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, planeIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && + ( planeIndex == rhs.planeIndex ); +# endif + } + + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneInfo2KHR; + }; + + struct DisplayPlanePropertiesKHR + { + using NativeType = VkDisplayPlanePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, + uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : currentDisplay( currentDisplay_ ) + , currentStackIndex( currentStackIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( currentDisplay, currentStackIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); +# endif + } + + bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == + sizeof( VkDisplayPlanePropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" 
); + + struct DisplayPlaneProperties2KHR + { + using NativeType = VkDisplayPlaneProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayPlaneProperties( displayPlaneProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayPlaneProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( displayPlaneProperties == rhs.displayPlaneProperties ); +# endif + } + + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == + sizeof( VkDisplayPlaneProperties2KHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayPlaneProperties2KHR; + }; + + struct DisplayPowerInfoEXT + { + using NativeType = VkDisplayPowerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT + : powerState( powerState_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & + setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + { + powerState = powerState_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, powerState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); +# endif + } + + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPowerInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; + }; + + struct DisplayPresentInfoKHR + { + using NativeType = VkDisplayPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcRect( srcRect_ ) + , dstRect( dstRect_ ) + , persistent( persistent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & + setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + { + srcRect = srcRect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & + setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + { + dstRect = dstRect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & + setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT + { + persistent = persistent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcRect, dstRect, persistent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; +#else + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && + ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); +# endif + } + + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPresentInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DisplayPresentInfoKHR; + }; + + struct DisplayPropertiesKHR + { + using NativeType = VkDisplayPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, + const char * displayName_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT + : display( display_ ) + , displayName( displayName_ ) + , physicalDimensions( physicalDimensions_ ) + , physicalResolution( physicalResolution_ ) + , supportedTransforms( supportedTransforms_ ) + , planeReorderPossible( planeReorderPossible_ ) + , persistentContent( persistentContent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( display, + displayName, + physicalDimensions, + physicalResolution, + supportedTransforms, + planeReorderPossible, + persistentContent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = display <=> rhs.display; cmp != 0 ) + return cmp; + if ( displayName != rhs.displayName ) + if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) + return cmp; + if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) + return cmp; + if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) + return cmp; + if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) + return cmp; + if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( display == rhs.display ) && + ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) && + ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && + ( supportedTransforms == rhs.supportedTransforms ) && + ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; + const char * displayName = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayPropertiesKHR is not nothrow_move_constructible!" 
); + + struct DisplayProperties2KHR + { + using NativeType = VkDisplayProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayProperties( displayProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayProperties2KHR const & ) const = default; +#else + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); +# endif + } + + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayProperties2KHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = DisplayProperties2KHR; + }; + + struct DisplaySurfaceCreateInfoKHR + { + using NativeType = VkDisplaySurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + 
imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) && + ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == + sizeof( VkDisplaySurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" 
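+  // Usage sketch (illustrative only): assuming an already-created vk::Instance `instance` and a
+  // vk::DisplayModeKHR `mode` picked via getDisplayModePropertiesKHR(), and that plane index /
+  // alpha mode have been validated against getDisplayPlaneCapabilitiesKHR(), the setter chain on
+  // DisplaySurfaceCreateInfoKHR is typically used like this (default, exception-enabled settings):
+  //
+  //   vk::DisplaySurfaceCreateInfoKHR createInfo = vk::DisplaySurfaceCreateInfoKHR()
+  //                                                  .setDisplayMode( mode )
+  //                                                  .setPlaneIndex( 0 )
+  //                                                  .setPlaneStackIndex( 0 )
+  //                                                  .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
+  //                                                  .setGlobalAlpha( 1.0f )
+  //                                                  .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
+  //                                                  .setImageExtent( { 1920, 1080 } );
+  //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( createInfo );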
); + + template <> + struct CppType + { + using Type = DisplaySurfaceCreateInfoKHR; + }; + + struct DrawIndexedIndirectCommand + { + using NativeType = VkDrawIndexedIndirectCommand; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount( indexCount_ ) + , instanceCount( instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) && + ( firstInstance == rhs.firstInstance ); +# endif + } + + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = 
{}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == + sizeof( VkDrawIndexedIndirectCommand ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndexedIndirectCommand is not nothrow_move_constructible!" ); + + struct DrawIndirectCommand + { + using NativeType = VkDrawIndirectCommand; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount( vertexCount_ ) + , instanceCount( instanceCount_ ) + , firstVertex( firstVertex_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( vertexCount, instanceCount, firstVertex, firstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && + ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance ); +# endif + } + + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( 
VkDrawIndirectCommand ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndirectCommand is not nothrow_move_constructible!" ); + + struct DrawMeshTasksIndirectCommandNV + { + using NativeType = VkDrawMeshTasksIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, + uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT + : taskCount( taskCount_ ) + , firstTask( firstTask_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawMeshTasksIndirectCommandNV & + operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT + { + taskCount = taskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + { + firstTask = firstTask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( taskCount, firstTask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); +# endif + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t taskCount = {}; + uint32_t firstTask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == + sizeof( VkDrawMeshTasksIndirectCommandNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" 
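+  // Usage sketch (illustrative only): these Draw*IndirectCommand wrappers are laid out to match
+  // the raw records that vkCmdDraw*Indirect reads from a buffer, so a record can simply be
+  // memcpy'd into a mapped indirect buffer. `mappedIndirectBuffer`, `indirectBuffer` and
+  // `commandBuffer` are assumed to exist already:
+  //
+  //   vk::DrawIndexedIndirectCommand draw( /*indexCount*/ 36, /*instanceCount*/ 1,
+  //                                        /*firstIndex*/ 0, /*vertexOffset*/ 0, /*firstInstance*/ 0 );
+  //   std::memcpy( mappedIndirectBuffer, &draw, sizeof( draw ) );
+  //   commandBuffer.drawIndexedIndirect( indirectBuffer, /*offset*/ 0, /*drawCount*/ 1,
+  //                                      /*stride*/ sizeof( vk::DrawIndexedIndirectCommand ) );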
); + + struct DrmFormatModifierProperties2EXT + { + using NativeType = VkDrmFormatModifierProperties2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( + uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR + DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierProperties2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierProperties2EXT & + operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); +# endif + } + + bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == + sizeof( VkDrmFormatModifierProperties2EXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" 
);
+
+  struct DrmFormatModifierPropertiesEXT
+  {
+    using NativeType = VkDrmFormatModifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(
+      uint64_t                                 drmFormatModifier_               = {},
+      uint32_t                                 drmFormatModifierPlaneCount_     = {},
+      VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+      , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DrmFormatModifierPropertiesEXT &
+      operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
+    }
+
+    explicit operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( drmFormatModifier == rhs.drmFormatModifier ) &&
+             ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
+             ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+# endif
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint64_t                                 drmFormatModifier               = {};
+    uint32_t                                 drmFormatModifierPlaneCount     = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) ==
+                              sizeof( VkDrmFormatModifierPropertiesEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value,
+    "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" 
); + + struct DrmFormatModifierPropertiesList2EXT + { + using NativeType = VkDrmFormatModifierPropertiesList2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDrmFormatModifierPropertiesList2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( + uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DrmFormatModifierPropertiesList2EXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + drmFormatModifierProperties_ ) + : drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) + , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesList2EXT & + operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesList2EXT & + operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == + sizeof( VkDrmFormatModifierPropertiesList2EXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesList2EXT; + }; + + struct DrmFormatModifierPropertiesListEXT + { + using NativeType = VkDrmFormatModifierPropertiesListEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eDrmFormatModifierPropertiesListEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( + uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesListEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DrmFormatModifierPropertiesListEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + drmFormatModifierProperties_ ) + : drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) + , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesListEXT & + operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesListEXT & + operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + 
VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == + sizeof( VkDrmFormatModifierPropertiesListEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesListEXT; + }; + + struct EventCreateInfo + { + using NativeType = VkEventCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : EventCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; +#else + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "EventCreateInfo is not nothrow_move_constructible!" 
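+  // Usage sketch (illustrative only), assuming a valid vk::Device `device` and default
+  // (exception-enabled) vulkan.hpp settings:
+  //
+  //   vk::Event event = device.createEvent( vk::EventCreateInfo() );
+  //   device.setEvent( event );                             // signal from the host
+  //   vk::Result status = device.getEventStatus( event );   // eEventSet or eEventReset
+  //   device.destroyEvent( event );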
); + + template <> + struct CppType + { + using Type = EventCreateInfo; + }; + + struct ExportFenceCreateInfo + { + using NativeType = VkExportFenceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceCreateInfo const & ) const = default; +#else + bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportFenceCreateInfo is not nothrow_move_constructible!" 
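+  // Usage sketch (illustrative only): export create infos like this one are not consumed on their
+  // own; they are chained into the pNext of the base create info. Assuming a valid vk::Device
+  // `device` and an opaque-fd export (Linux-style; Win32 handles use the *Win32HandleInfoKHR
+  // structs defined below):
+  //
+  //   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
+  //   vk::FenceCreateInfo       fenceInfo;
+  //   fenceInfo.pNext = &exportInfo;
+  //   vk::Fence fence = device.createFence( fenceInfo );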
); + + template <> + struct CppType + { + using Type = ExportFenceCreateInfo; + }; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportFenceWin32HandleInfoKHR + { + using NativeType = VkExportFenceWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportFenceWin32HandleInfoKHR & + operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & + setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == + sizeof( VkExportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportMemoryAllocateInfo + { + using NativeType = VkExportMemoryAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == + sizeof( VkExportMemoryAllocateInfo ), + "struct and wrapper have different size!" 
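+  // Usage sketch (illustrative only): like the fence/semaphore variants, this struct extends
+  // vk::MemoryAllocateInfo via pNext. `allocationSize` and `memoryTypeIndex` are assumed to have
+  // been chosen from the resource's memory requirements and the device's memory properties:
+  //
+  //   vk::ExportMemoryAllocateInfo exportInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   vk::MemoryAllocateInfo       allocInfo( allocationSize, memoryTypeIndex );
+  //   allocInfo.pNext = &exportInfo;
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );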
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMemoryAllocateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfo; + }; + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + + struct ExportMemoryAllocateInfoNV + { + using NativeType = VkExportMemoryAllocateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == + sizeof( VkExportMemoryAllocateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoKHR + { + using NativeType = VkExportMemoryWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryWin32HandleInfoKHR & + operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & + setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * 
pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == + sizeof( VkExportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoNV + { + using NativeType = VkExportMemoryWin32HandleInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryWin32HandleInfoNV & + operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & + setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ); +# endif + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# 
endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == + sizeof( VkExportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportSemaphoreCreateInfo + { + using NativeType = VkExportSemaphoreCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; +#else + bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == + sizeof( VkExportSemaphoreCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportSemaphoreCreateInfo; + }; + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportSemaphoreWin32HandleInfoKHR + { + using NativeType = VkExportSemaphoreWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eExportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportSemaphoreWin32HandleInfoKHR & + operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & + setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && + ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == + sizeof( VkExportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExtensionProperties + { + using NativeType = VkExtensionProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ExtensionProperties( std::array const & extensionName_ = {}, + uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : extensionName( extensionName_ ) + , specVersion( specVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExtensionProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( extensionName, specVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExtensionProperties const & ) const = default; +#else + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); +# endif + } + + bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; + uint32_t specVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
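  // A minimal usage sketch (assumes a working Vulkan loader): ExtensionProperties entries are
  // normally obtained from the enumeration helpers rather than filled in by hand, e.g.
  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
  //   for ( auto const & ext : extensions ) { /* inspect ext.extensionName and ext.specVersion */ }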
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExtensionProperties is not nothrow_move_constructible!" ); + + struct ExternalMemoryProperties + { + using NativeType = VkExternalMemoryProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryFeatures( externalMemoryFeatures_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryProperties const & ) const = default; +#else + bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); +# endif + } + + bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == + sizeof( VkExternalMemoryProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryProperties is not nothrow_move_constructible!" 
); + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + + struct ExternalBufferProperties + { + using NativeType = VkExternalBufferProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalBufferProperties const & ) const = default; +#else + bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); +# endif + } + + bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == + sizeof( VkExternalBufferProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalBufferProperties is not nothrow_move_constructible!" 
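  // A minimal usage sketch (assumes a valid vk::PhysicalDevice named physicalDevice): this output
  // structure is filled by the external-buffer capability query, e.g.
  //   vk::PhysicalDeviceExternalBufferInfo info{ {}, vk::BufferUsageFlagBits::eTransferSrc,
  //                                              vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
  //   vk::ExternalBufferProperties props = physicalDevice.getExternalBufferProperties( info );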
); + + template <> + struct CppType + { + using Type = ExternalBufferProperties; + }; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + struct ExternalFenceProperties + { + using NativeType = VkExternalFenceProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalFenceFeatures( externalFenceFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFenceProperties const & ) const = default; +#else + bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalFenceFeatures == rhs.externalFenceFeatures ); +# endif + } + + bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == + sizeof( VkExternalFenceProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
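  // A minimal usage sketch (assumes a valid vk::PhysicalDevice named physicalDevice):
  //   vk::PhysicalDeviceExternalFenceInfo info{ vk::ExternalFenceHandleTypeFlagBits::eSyncFd };
  //   vk::ExternalFenceProperties props = physicalDevice.getExternalFenceProperties( info );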
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalFenceProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ExternalFenceProperties; + }; + using ExternalFencePropertiesKHR = ExternalFenceProperties; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct ExternalFormatANDROID + { + using NativeType = VkExternalFormatANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT + : externalFormat( externalFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFormatANDROID( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT + { + externalFormat = externalFormat_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFormatANDROID const & ) const = default; +# else + bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); +# endif + } + + bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; + void * pNext = {}; + uint64_t externalFormat = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalFormatANDROID is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ExternalFormatANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ExternalImageFormatProperties + { + using NativeType = VkExternalImageFormatProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryProperties( externalMemoryProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalImageFormatProperties & + operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatProperties const & ) const = default; +#else + bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( externalMemoryProperties == rhs.externalMemoryProperties ); +# endif + } + + bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == + sizeof( VkExternalImageFormatProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExternalImageFormatProperties is not nothrow_move_constructible!" 
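  // A minimal usage sketch (assumes a valid vk::PhysicalDevice named physicalDevice and a
  // PhysicalDeviceImageFormatInfo2 named imageFormatInfo with a PhysicalDeviceExternalImageFormatInfo
  // chained into it): ExternalImageFormatProperties is returned via the pNext chain of the query, e.g.
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::ExternalImageFormatProperties>( imageFormatInfo );
  //   vk::ExternalImageFormatProperties extProps = chain.get<vk::ExternalImageFormatProperties>();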
); + + template <> + struct CppType + { + using Type = ExternalImageFormatProperties; + }; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + struct ImageFormatProperties + { + using NativeType = VkImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, + uint32_t maxMipLevels_ = {}, + uint32_t maxArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxExtent( maxExtent_ ) + , maxMipLevels( maxMipLevels_ ) + , maxArrayLayers( maxArrayLayers_ ) + , sampleCounts( sampleCounts_ ) + , maxResourceSize( maxResourceSize_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties const & ) const = default; +#else + bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && + ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) && + ( maxResourceSize == rhs.maxResourceSize ); +# endif + } + + bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatProperties is not nothrow_move_constructible!" 
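  // A minimal usage sketch (assumes a valid vk::PhysicalDevice named physicalDevice):
  //   vk::ImageFormatProperties props = physicalDevice.getImageFormatProperties(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eSampled, {} );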
); + + struct ExternalImageFormatPropertiesNV + { + using NativeType = VkExternalImageFormatPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) + , externalMemoryFeatures( externalMemoryFeatures_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalImageFormatPropertiesNV & + operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( imageFormatProperties == rhs.imageFormatProperties ) && + ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); +# endif + } + + bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == + sizeof( VkExternalImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" ); + + struct ExternalMemoryBufferCreateInfo + { + using NativeType = VkExternalMemoryBufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryBufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalMemoryBufferCreateInfo & + operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == + sizeof( VkExternalMemoryBufferCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" 
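  // A minimal usage sketch (assumes a valid vk::Device named device): this structure is chained
  // into BufferCreateInfo::pNext to request an exportable buffer, e.g.
  //   vk::ExternalMemoryBufferCreateInfo extMem{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
  //   vk::BufferCreateInfo bufferInfo{ {}, 1024, vk::BufferUsageFlagBits::eTransferSrc };
  //   bufferInfo.pNext = &extMem;
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );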
); + + template <> + struct CppType + { + using Type = ExternalMemoryBufferCreateInfo; + }; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + struct ExternalMemoryImageCreateInfo + { + using NativeType = VkExternalMemoryImageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalMemoryImageCreateInfo & + operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == + sizeof( VkExternalMemoryImageCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" 
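  // A minimal usage sketch (assumes a valid vk::Device named device and an ImageCreateInfo named
  // imageInfo that is otherwise filled in as usual):
  //   vk::ExternalMemoryImageCreateInfo extMem{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
  //   imageInfo.pNext = &extMem;
  //   vk::Image image = device.createImage( imageInfo );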
); + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfo; + }; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + struct ExternalMemoryImageCreateInfoNV + { + using NativeType = VkExternalMemoryImageCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalMemoryImageCreateInfoNV & + operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; +#else + bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == + sizeof( VkExternalMemoryImageCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfoNV; + }; + + struct ExternalSemaphoreProperties + { + using NativeType = VkExternalSemaphoreProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR + ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; +#else + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && + ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); +# endif + } + + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == + sizeof( VkExternalSemaphoreProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
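  // A minimal usage sketch (assumes a valid vk::PhysicalDevice named physicalDevice):
  //   vk::PhysicalDeviceExternalSemaphoreInfo info{ vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
  //   vk::ExternalSemaphoreProperties props = physicalDevice.getExternalSemaphoreProperties( info );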
 );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value,
+    "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
+  {
+    using Type = ExternalSemaphoreProperties;
+  };
+  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+
+  struct FenceCreateInfo
+  {
+    using NativeType = VkFenceCreateInfo;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo &
+                            setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo *>( this );
+    }
+
+    explicit operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceCreateInfo *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FenceCreateInfo const & ) const = default;
+#else
+    bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+# endif
+    }
+
+    bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eFenceCreateInfo;
+    const void *                           pNext = {};
+    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value,
+                            "FenceCreateInfo is not nothrow_move_constructible!"
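  // A minimal usage sketch (assumes a valid vk::Device named device):
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo{ vk::FenceCreateFlagBits::eSignaled } );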
); + + template <> + struct CppType + { + using Type = FenceCreateInfo; + }; + + struct FenceGetFdInfoKHR + { + using NativeType = VkFenceGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FenceGetFdInfoKHR is not nothrow_move_constructible!" 
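  // A minimal usage sketch (assumes VK_KHR_external_fence_fd is enabled, a valid vk::Device named
  // device, and a vk::Fence named fence created with an exportable handle type):
  //   int fd = device.getFenceFdKHR( vk::FenceGetFdInfoKHR{ fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd } );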
); + + template <> + struct CppType + { + using Type = FenceGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct FenceGetWin32HandleInfoKHR + { + using NativeType = VkFenceGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & + setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == + sizeof( VkFenceGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = FenceGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct FilterCubicImageViewImageFormatPropertiesEXT + { + using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT + : filterCubic( filterCubic_ ) + , filterCubicMinmax( filterCubicMinmax_ ) + {} + + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( + FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : FilterCubicImageViewImageFormatPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FilterCubicImageViewImageFormatPropertiesEXT & + operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT & + operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterCubic, filterCubicMinmax ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; +#else + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && + ( filterCubicMinmax == rhs.filterCubicMinmax ); +# endif + } + + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) == + sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), + "struct and wrapper have different size!" 
);
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
+    "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
+  {
+    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
+  };
+
+  struct FormatProperties
+  {
+    using NativeType = VkFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_  = {},
+                        VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
+                        VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_        = {} ) VULKAN_HPP_NOEXCEPT
+      : linearTilingFeatures( linearTilingFeatures_ )
+      , optimalTilingFeatures( optimalTilingFeatures_ )
+      , bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties *>( this );
+    }
+
+    explicit operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &,
+               VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &,
+               VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FormatProperties const & ) const = default;
+#else
+    bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
+             ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
+# endif
+    }
+
+    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures  = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures        = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value,
+                            "FormatProperties is not nothrow_move_constructible!"
);
+
+  struct FormatProperties2
+  {
+    using NativeType = VkFormatProperties2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : formatProperties( formatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties2 *>( this );
+    }
+
+    explicit operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::
+      tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FormatProperties2 const & ) const = default;
+#else
+    bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties );
+# endif
+    }
+
+    bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType            = StructureType::eFormatProperties2;
+    void *                                 pNext            = {};
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value,
+                            "FormatProperties2 is not nothrow_move_constructible!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eFormatProperties2>
+  {
+    using Type = FormatProperties2;
+  };
+  using FormatProperties2KHR = FormatProperties2;
+
+  struct FormatProperties3
+  {
+    using NativeType = VkFormatProperties3;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_  = {},
+                         VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {},
+                         VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_        = {} ) VULKAN_HPP_NOEXCEPT
+      : linearTilingFeatures( linearTilingFeatures_ )
+      , optimalTilingFeatures( optimalTilingFeatures_ )
+      , bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties3 const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties3 *>( this );
+    }
+
+    explicit operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties3 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &,
+               VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &,
+               VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( FormatProperties3 const & ) const = default;
+#else
+    bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
+             ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
+# endif
+    }
+
+    bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType                 = StructureType::eFormatProperties3;
+    void *                                    pNext                 = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures  = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures        = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value,
+                            "FormatProperties3 is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = FormatProperties3; + }; + using FormatProperties3KHR = FormatProperties3; + + struct FragmentShadingRateAttachmentInfoKHR + { + using NativeType = VkFragmentShadingRateAttachmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFragmentShadingRateAttachmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT + : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ) + , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) + {} + + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FragmentShadingRateAttachmentInfoKHR & + operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FragmentShadingRateAttachmentInfoKHR & + operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( + VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; +#else + bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); +# endif + } + + bool operator!=( 
FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == + sizeof( VkFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = FragmentShadingRateAttachmentInfoKHR; + }; + + struct FramebufferAttachmentImageInfo + { + using NativeType = VkFramebufferAttachmentImageInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + {} + + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo( + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + uint32_t width_, + uint32_t height_, + uint32_t layerCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + : flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( static_cast( viewFormats_.size() ) ) + , pViewFormats( viewFormats_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferAttachmentImageInfo & + operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & + setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & + setUsage( 
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & + setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & + setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo & setViewFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && + ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && + ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); +# endif + } + + bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == + sizeof( VkFramebufferAttachmentImageInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = FramebufferAttachmentImageInfo; + }; + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + + struct FramebufferAttachmentsCreateInfo + { + using NativeType = VkFramebufferAttachmentsCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( + uint32_t attachmentImageInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentImageInfoCount( attachmentImageInfoCount_ ) + , pAttachmentImageInfos( pAttachmentImageInfos_ ) + {} + + VULKAN_HPP_CONSTEXPR + FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentImageInfos_ ) + : attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) + , pAttachmentImageInfos( attachmentImageInfos_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferAttachmentsCreateInfo & + operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & + setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = attachmentImageInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentImageInfos = pAttachmentImageInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); + pAttachmentImageInfos = attachmentImageInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && + ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); +# endif + } + + bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; + const void * pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == + sizeof( VkFramebufferAttachmentsCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = FramebufferAttachmentsCreateInfo; + }; + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + + struct FramebufferCreateInfo + { + using NativeType = VkFramebufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo( + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {} ) + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & + setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT + { + layers = layers_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferCreateInfo const & ) const = default; +#else + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); +# endif + } + + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width 
= {}; + uint32_t height = {}; + uint32_t layers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FramebufferCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = FramebufferCreateInfo; + }; + + struct FramebufferMixedSamplesCombinationNV + { + using NativeType = VkFramebufferMixedSamplesCombinationNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eFramebufferMixedSamplesCombinationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) + , rasterizationSamples( rasterizationSamples_ ) + , depthStencilSamples( depthStencilSamples_ ) + , colorSamples( colorSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferMixedSamplesCombinationNV & + operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV & + operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; +#else + bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( coverageReductionMode == rhs.coverageReductionMode ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); +# endif + } + + bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == + sizeof( VkFramebufferMixedSamplesCombinationNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = FramebufferMixedSamplesCombinationNV; + }; + + struct IndirectCommandsStreamNV + { + using NativeType = VkIndirectCommandsStreamNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsStreamNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsStreamNV const & ) const = default; +#else + bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); +# endif + } + + bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + 
VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == + sizeof( VkIndirectCommandsStreamNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsStreamNV is not nothrow_move_constructible!" ); + + struct GeneratedCommandsInfoNV + { + using NativeType = VkGeneratedCommandsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( streamCount_ ) + , pStreams( pStreams_ ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + streams_, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( static_cast( streams_.size() ) ) + , pStreams( streams_.data() ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + 
, preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT + { + pStreams = pStreams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV & setStreams( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + streams_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT + { + preprocessBuffer = preprocessBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer 
sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pipelineBindPoint, + pipeline, + indirectCommandsLayout, + streamCount, + pStreams, + sequencesCount, + preprocessBuffer, + preprocessOffset, + preprocessSize, + sequencesCountBuffer, + sequencesCountOffset, + sequencesIndexBuffer, + sequencesIndexOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && + ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) && + ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && + ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); +# endif + } + + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == + sizeof( VkGeneratedCommandsInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = GeneratedCommandsInfoNV; + }; + + struct GeneratedCommandsMemoryRequirementsInfoNV + { + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + {} + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsMemoryRequirementsInfoNV & + operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV & + operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequencesCount = maxSequencesCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBindPoint, 
pipeline, indirectCommandsLayout, maxSequencesCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && + ( maxSequencesCount == rhs.maxSequencesCount ); +# endif + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == + sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoNV; + }; + + struct VertexInputBindingDescription + { + using NativeType = VkVertexInputBindingDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = + VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputBindingDescription & + operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & + setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
explicit operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, stride, inputRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription const & ) const = default; +#else + bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); +# endif + } + + bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == + sizeof( VkVertexInputBindingDescription ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VertexInputBindingDescription is not nothrow_move_constructible!" ); + + struct VertexInputAttributeDescription + { + using NativeType = VkVertexInputAttributeDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputAttributeDescription( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputAttributeDescription & + operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( location, binding, format, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && + ( offset == rhs.offset ); +# endif + } + + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == + sizeof( VkVertexInputAttributeDescription ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VertexInputAttributeDescription is not nothrow_move_constructible!" ); + + struct PipelineVertexInputStateCreateInfo + { + using NativeType = VkPipelineVertexInputStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineVertexInputStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {} ) + VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) + , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) + , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) + , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexBindingDescriptions_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexAttributeDescriptions_ = {} ) + : flags( flags_ ) + , 
vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) + , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) + , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) + , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineVertexInputStateCreateInfo & + operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo & + operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION 
+ auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + vertexBindingDescriptionCount, + pVertexBindingDescriptions, + vertexAttributeDescriptionCount, + pVertexAttributeDescriptions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && + ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && + ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); +# endif + } + + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == + sizeof( VkPipelineVertexInputStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineVertexInputStateCreateInfo; + }; + + struct PipelineInputAssemblyStateCreateInfo + { + using NativeType = VkPipelineInputAssemblyStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineInputAssemblyStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , topology( topology_ ) + , primitiveRestartEnable( primitiveRestartEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineInputAssemblyStateCreateInfo & + operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo & + operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + { + topology = topology_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, topology, primitiveRestartEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); +# endif + } + + 
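// Usage sketch (illustrative only, not part of this patch): with the enhanced-mode
// ArrayProxyNoTemporaries constructor defined above, the binding/attribute counts of a
// PipelineVertexInputStateCreateInfo are derived from the containers. Assumes the `vk`
// namespace alias, <array>, and that VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined.
#if 0
  std::array<vk::VertexInputBindingDescription, 1>   bindings{};    // filled elsewhere
  std::array<vk::VertexInputAttributeDescription, 2> attributes{};  // filled elsewhere

  // Counts and pointers are taken from the arrays.
  vk::PipelineVertexInputStateCreateInfo vertexInput( {}, bindings, attributes );

  // Equivalent explicit form via the array-proxy setters.
  vk::PipelineVertexInputStateCreateInfo vertexInput2;
  vertexInput2.setVertexBindingDescriptions( bindings ).setVertexAttributeDescriptions( attributes );
#endif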
bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == + sizeof( VkPipelineInputAssemblyStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineInputAssemblyStateCreateInfo; + }; + + struct PipelineTessellationStateCreateInfo + { + using NativeType = VkPipelineTessellationStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineTessellationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , patchControlPoints( patchControlPoints_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineTessellationStateCreateInfo & + operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo & + operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + patchControlPoints = patchControlPoints_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, patchControlPoints ); + } +#endif + 
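// Usage sketch (illustrative only, not part of this patch): a typical input-assembly
// state for indexed triangle lists. Assumes the `vk` namespace alias and the VK_FALSE
// macro from the C headers.
#if 0
  vk::PipelineInputAssemblyStateCreateInfo inputAssembly(
      {},                                    // flags
      vk::PrimitiveTopology::eTriangleList,  // topology
      VK_FALSE );                            // primitiveRestartEnable
#endif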
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( patchControlPoints == rhs.patchControlPoints ); +# endif + } + + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == + sizeof( VkPipelineTessellationStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineTessellationStateCreateInfo; + }; + + struct PipelineViewportStateCreateInfo + { + using NativeType = VkPipelineViewportStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewports( pViewports_ ) + , scissorCount( scissorCount_ ) + , pScissors( pScissors_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {} ) + : flags( flags_ ) + , viewportCount( static_cast( viewports_.size() ) ) + , pViewports( viewports_.data() ) + , scissorCount( static_cast( scissors_.size() ) ) + , pScissors( scissors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportStateCreateInfo & + operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT + { + pViewports = pViewports_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & setViewports( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) + VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = scissorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & + setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT + { + pScissors = pScissors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) + VULKAN_HPP_NOEXCEPT + { + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) && + ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); +# endif + } + + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == + sizeof( 
VkPipelineViewportStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineViewportStateCreateInfo; + }; + + struct PipelineRasterizationStateCreateInfo + { + using NativeType = VkPipelineRasterizationStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , depthClampEnable( depthClampEnable_ ) + , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) + , polygonMode( polygonMode_ ) + , cullMode( cullMode_ ) + , frontFace( frontFace_ ) + , depthBiasEnable( depthBiasEnable_ ) + , depthBiasConstantFactor( depthBiasConstantFactor_ ) + , depthBiasClamp( depthBiasClamp_ ) + , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) + , lineWidth( lineWidth_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationStateCreateInfo & + operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateCreateInfo & + operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClampEnable = depthClampEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT + { + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineRasterizationStateCreateInfo & + setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT + { + polygonMode = polygonMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT + { + cullMode = cullMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT + { + frontFace = frontFace_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasEnable = depthBiasEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT + { + lineWidth = lineWidth_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + depthClampEnable, + rasterizerDiscardEnable, + polygonMode, + cullMode, + frontFace, + depthBiasEnable, + depthBiasConstantFactor, + depthBiasClamp, + depthBiasSlopeFactor, + lineWidth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClampEnable == rhs.depthClampEnable ) && + ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && + ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) && + ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && + ( lineWidth == rhs.lineWidth ); +# endif + } + + bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + const void * pNext = {}; + 
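// Usage sketch (illustrative only, not part of this patch): viewport and rasterization
// state for a pipeline that sets the actual viewport/scissor rectangles dynamically at
// record time. Assumes the `vk` namespace alias.
#if 0
  // Only the counts are needed here when eViewport/eScissor are dynamic states.
  vk::PipelineViewportStateCreateInfo viewportState{};
  viewportState.setViewportCount( 1 ).setScissorCount( 1 );

  // Filled polygons, back-face culling, no depth bias.
  vk::PipelineRasterizationStateCreateInfo rasterization =
      vk::PipelineRasterizationStateCreateInfo{}
          .setPolygonMode( vk::PolygonMode::eFill )
          .setCullMode( vk::CullModeFlagBits::eBack )
          .setFrontFace( vk::FrontFace::eCounterClockwise )
          .setLineWidth( 1.0f );
#endif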
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; + VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + float lineWidth = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == + sizeof( VkPipelineRasterizationStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationStateCreateInfo; + }; + + struct PipelineMultisampleStateCreateInfo + { + using NativeType = VkPipelineMultisampleStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineMultisampleStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rasterizationSamples( rasterizationSamples_ ) + , sampleShadingEnable( sampleShadingEnable_ ) + , minSampleShading( minSampleShading_ ) + , pSampleMask( pSampleMask_ ) + , alphaToCoverageEnable( alphaToCoverageEnable_ ) + , alphaToOneEnable( alphaToOneEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineMultisampleStateCreateInfo & + operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineMultisampleStateCreateInfo & + operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setRasterizationSamples( 
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT + { + minSampleShading = minSampleShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT + { + pSampleMask = pSampleMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + rasterizationSamples, + sampleShadingEnable, + minSampleShading, + pSampleMask, + alphaToCoverageEnable, + alphaToOneEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && + ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && + ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && + ( alphaToOneEnable == rhs.alphaToOneEnable ); +# endif + } + + bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == + sizeof( VkPipelineMultisampleStateCreateInfo 
), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineMultisampleStateCreateInfo; + }; + + struct StencilOpState + { + using NativeType = VkStencilOpState; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + : failOp( failOp_ ) + , passOp( passOp_ ) + , depthFailOp( depthFailOp_ ) + , compareOp( compareOp_ ) + , compareMask( compareMask_ ) + , writeMask( writeMask_ ) + , reference( reference_ ) + {} + + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + : StencilOpState( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT + { + failOp = failOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT + { + passOp = passOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & + setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT + { + depthFailOp = depthFailOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & + setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT + { + compareMask = compareMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT + { + writeMask = writeMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT + { + reference = reference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StencilOpState const & ) 
const = default; +#else + bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && + ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && + ( reference == rhs.reference ); +# endif + } + + bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + uint32_t compareMask = {}; + uint32_t writeMask = {}; + uint32_t reference = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StencilOpState is not nothrow_move_constructible!" ); + + struct PipelineDepthStencilStateCreateInfo + { + using NativeType = VkPipelineDepthStencilStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineDepthStencilStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, + float minDepthBounds_ = {}, + float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , depthTestEnable( depthTestEnable_ ) + , depthWriteEnable( depthWriteEnable_ ) + , depthCompareOp( depthCompareOp_ ) + , depthBoundsTestEnable( depthBoundsTestEnable_ ) + , stencilTestEnable( stencilTestEnable_ ) + , front( front_ ) + , back( back_ ) + , minDepthBounds( minDepthBounds_ ) + , maxDepthBounds( maxDepthBounds_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDepthStencilStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineDepthStencilStateCreateInfo & + operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDepthStencilStateCreateInfo & + operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthTestEnable = depthTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthWriteEnable = depthWriteEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT + { + depthCompareOp = depthCompareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBoundsTestEnable = depthBoundsTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + stencilTestEnable = stencilTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT + { + front = front_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT + { + back = back_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT + { + minDepthBounds = minDepthBounds_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT + { + maxDepthBounds = maxDepthBounds_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + depthTestEnable, + depthWriteEnable, + depthCompareOp, + depthBoundsTestEnable, + stencilTestEnable, + front, + back, + minDepthBounds, + maxDepthBounds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) && + ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && + ( stencilTestEnable == 
rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) && + ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); +# endif + } + + bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; + VULKAN_HPP_NAMESPACE::StencilOpState front = {}; + VULKAN_HPP_NAMESPACE::StencilOpState back = {}; + float minDepthBounds = {}; + float maxDepthBounds = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == + sizeof( VkPipelineDepthStencilStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineDepthStencilStateCreateInfo; + }; + + struct PipelineColorBlendAttachmentState + { + using NativeType = VkPipelineColorBlendAttachmentState; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( + VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT + : blendEnable( blendEnable_ ) + , srcColorBlendFactor( srcColorBlendFactor_ ) + , dstColorBlendFactor( dstColorBlendFactor_ ) + , colorBlendOp( colorBlendOp_ ) + , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) + , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) + , alphaBlendOp( alphaBlendOp_ ) + , colorWriteMask( colorWriteMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorBlendAttachmentState & + operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
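// Usage sketch (illustrative only, not part of this patch): conventional depth testing
// plus one alpha-blended color attachment, using the setters defined in this region.
// Assumes the `vk` namespace alias and the VK_TRUE macro from the C headers.
#if 0
  // Less-than depth test with depth writes, stencil left disabled.
  vk::PipelineDepthStencilStateCreateInfo depthStencil =
      vk::PipelineDepthStencilStateCreateInfo{}
          .setDepthTestEnable( VK_TRUE )
          .setDepthWriteEnable( VK_TRUE )
          .setDepthCompareOp( vk::CompareOp::eLess );

  // out.rgb = src.rgb * src.a + dst.rgb * (1 - src.a), writing all channels.
  vk::PipelineColorBlendAttachmentState blendAttachment =
      vk::PipelineColorBlendAttachmentState{}
          .setBlendEnable( VK_TRUE )
          .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
          .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
          .setColorBlendOp( vk::BlendOp::eAdd )
          .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
                              vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
#endif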
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT + { + blendEnable = blendEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + colorBlendOp = colorBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + alphaBlendOp = alphaBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteMask = colorWriteMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( blendEnable, + srcColorBlendFactor, + dstColorBlendFactor, + colorBlendOp, + srcAlphaBlendFactor, + dstAlphaBlendFactor, + alphaBlendOp, + colorWriteMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default; +#else + bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && + ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) && + ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && + ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask ); +# endif + } + + bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = 
VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == + sizeof( VkPipelineColorBlendAttachmentState ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" ); + + struct PipelineColorBlendStateCreateInfo + { + using NativeType = VkPipelineColorBlendStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineColorBlendStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, + std::array const & blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , blendConstants( blendConstants_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, + std::array const & blendConstants_ = {} ) + : flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , blendConstants( blendConstants_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorBlendStateCreateInfo & + operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + 
setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT + { + logicOpEnable = logicOpEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT + { + blendConstants = blendConstants_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( logicOpEnable == rhs.logicOpEnable ) && ( logicOp == rhs.logicOp ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( blendConstants == rhs.blendConstants ); +# endif + } + + bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; + VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == + sizeof( VkPipelineColorBlendStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineColorBlendStateCreateInfo; + }; + + struct PipelineDynamicStateCreateInfo + { + using NativeType = VkPipelineDynamicStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, + uint32_t dynamicStateCount_ = {}, + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dynamicStateCount( dynamicStateCount_ ) + , pDynamicStates( pDynamicStates_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDynamicStateCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) + : flags( flags_ ) + , dynamicStateCount( static_cast( dynamicStates_.size() ) ) + , pDynamicStates( dynamicStates_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineDynamicStateCreateInfo & + operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & + setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = dynamicStateCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & + setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicStates = pDynamicStates_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo & setDynamicStates( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) + VULKAN_HPP_NOEXCEPT + { + dynamicStateCount = static_cast( dynamicStates_.size() ); + pDynamicStates = dynamicStates_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineDynamicStateCreateInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( dynamicStateCount == rhs.dynamicStateCount ) && ( pDynamicStates == rhs.pDynamicStates ); +# endif + } + + bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; + uint32_t dynamicStateCount = {}; + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == + sizeof( VkPipelineDynamicStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" 
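Illustrative usage sketch (not part of this patch): the wrapper above stores only a count and a pointer, and its ArrayProxyNoTemporaries setter fills both from one container. The function name and the choice of dynamic states below are placeholders.

#include <array>
#include <vulkan/vulkan.hpp>

// Sketch: viewport and scissor supplied at draw time instead of being baked into the pipeline.
vk::PipelineDynamicStateCreateInfo MakeDynamicStateInfo()
{
  // Only a pointer is stored, so the array must outlive the create-info; a static suffices here.
  static constexpr std::array<vk::DynamicState, 2> kDynamicStates = { vk::DynamicState::eViewport,
                                                                      vk::DynamicState::eScissor };
  vk::PipelineDynamicStateCreateInfo info;
  info.setDynamicStates( kDynamicStates );  // sets dynamicStateCount and pDynamicStates together
  return info;
}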
); + + template <> + struct CppType + { + using Type = PipelineDynamicStateCreateInfo; + }; + + struct GraphicsPipelineCreateInfo + { + using NativeType = VkGraphicsPipelineCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, 
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState( + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT + { + pInputAssemblyState = pInputAssemblyState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState( + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT + { + pViewportState = pViewportState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState( + const 
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT + { + pRasterizationState = pRasterizationState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState( + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT + { + pMultisampleState = pMultisampleState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState( + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilState = pDepthStencilState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState( + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT + { + pColorBlendState = pColorBlendState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState( + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + pVertexInputState, + pInputAssemblyState, + pTessellationState, + pViewportState, + pRasterizationState, + pMultisampleState, + pDepthStencilState, + pColorBlendState, + pDynamicState, + layout, + renderPass, + subpass, + basePipelineHandle, + basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; +#else + bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && + ( pTessellationState == 
rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && + ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && + ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {}; + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == + sizeof( VkGraphicsPipelineCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" 
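Illustrative usage sketch (not part of this patch) of how the state structs in this hunk are combined into a vk::GraphicsPipelineCreateInfo. The device, layout, render pass and shader stages are assumed to have been created elsewhere, and the fixed 640x480 viewport is a placeholder.

#include <array>
#include <utility>
#include <vector>
#include <vulkan/vulkan.hpp>

vk::UniquePipeline BuildPipeline( vk::Device device, vk::PipelineLayout layout, vk::RenderPass render_pass,
                                  const std::vector<vk::PipelineShaderStageCreateInfo>& stages )
{
  vk::PipelineVertexInputStateCreateInfo vertex_input;  // no vertex buffers in this sketch
  vk::PipelineInputAssemblyStateCreateInfo input_assembly( {}, vk::PrimitiveTopology::eTriangleList );

  vk::Viewport viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f );
  vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) );
  vk::PipelineViewportStateCreateInfo viewport_state;
  viewport_state.setViewports( viewport ).setScissors( scissor );

  vk::PipelineRasterizationStateCreateInfo raster;
  raster.setLineWidth( 1.0f );
  vk::PipelineMultisampleStateCreateInfo multisample;

  vk::PipelineColorBlendAttachmentState blend_attachment;  // blending disabled, write all channels
  blend_attachment.setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
                                      vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
  vk::PipelineColorBlendStateCreateInfo blend_state;
  blend_state.setAttachments( blend_attachment );

  vk::GraphicsPipelineCreateInfo info;
  info.setStages( stages )
      .setPVertexInputState( &vertex_input )
      .setPInputAssemblyState( &input_assembly )
      .setPViewportState( &viewport_state )
      .setPRasterizationState( &raster )
      .setPMultisampleState( &multisample )
      .setPColorBlendState( &blend_state )
      .setLayout( layout )
      .setRenderPass( render_pass );

  // createGraphicsPipelineUnique returns a ResultValue because pipeline creation has
  // non-error result codes; anything other than eSuccess is treated as failure here.
  auto result = device.createGraphicsPipelineUnique( vk::PipelineCache(), info );
  if ( result.result != vk::Result::eSuccess )
    return {};
  return std::move( result.value );
}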
); + + template <> + struct CppType + { + using Type = GraphicsPipelineCreateInfo; + }; + + struct GraphicsShaderGroupCreateInfoNV + { + using NativeType = VkGraphicsShaderGroupCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT + : stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + {} + + VULKAN_HPP_CONSTEXPR + GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) + : stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GraphicsShaderGroupCreateInfoNV & + operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState( + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState( + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = 
pTessellationState_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && + ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) && + ( pTessellationState == rhs.pTessellationState ); +# endif + } + + bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void * pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == + sizeof( VkGraphicsShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" 
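Illustrative usage sketch (not part of this patch): VK_NV_device_generated_commands describes each switchable shader group with the struct above. One group is built here from existing stage descriptions; the optional per-group vertex-input and tessellation overrides are deliberately left unset.

#include <vector>
#include <vulkan/vulkan.hpp>

vk::GraphicsShaderGroupCreateInfoNV
MakeShaderGroup( const std::vector<vk::PipelineShaderStageCreateInfo>& stages )
{
  vk::GraphicsShaderGroupCreateInfoNV group;
  // Only pointers are stored, so the stage vector must outlive the group;
  // pVertexInputState and pTessellationState remain null in this sketch.
  group.setStages( stages );
  return group;
}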
); + + template <> + struct CppType + { + using Type = GraphicsShaderGroupCreateInfoNV; + }; + + struct GraphicsPipelineShaderGroupsCreateInfoNV + { + using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {} ) VULKAN_HPP_NOEXCEPT + : groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , pipelineCount( pipelineCount_ ) + , pPipelines( pPipelines_ ) + {} + + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( + GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : GraphicsPipelineShaderGroupsCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + groups_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {} ) + : groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , pipelineCount( static_cast( pipelines_.size() ) ) + , pPipelines( pipelines_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GraphicsPipelineShaderGroupsCreateInfoNV & + operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV & + operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) + VULKAN_HPP_NOEXCEPT + { + pipelineCount = static_cast( pipelines_.size() ); + pPipelines = pipelines_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); +# endif + } + + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void * pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == + sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" 
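Illustrative usage sketch (not part of this patch): the groups built above are gathered into this wrapper and chained into the graphics pipeline create-info through pNext, which is how VK_NV_device_generated_commands consumes them. All containers are assumed to outlive pipeline creation.

#include <vector>
#include <vulkan/vulkan.hpp>

vk::GraphicsPipelineShaderGroupsCreateInfoNV
MakeShaderGroupsInfo( const std::vector<vk::GraphicsShaderGroupCreateInfoNV>& groups,
                      const std::vector<vk::Pipeline>& reference_pipelines )
{
  vk::GraphicsPipelineShaderGroupsCreateInfoNV info;
  info.setGroups( groups ).setPipelines( reference_pipelines );
  return info;
}

// Usage sketch: chain it before creating the pipeline.
//   auto groups_info = MakeShaderGroupsInfo( groups, referenced_pipelines );
//   pipeline_create_info.setPNext( &groups_info );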
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
+  };
+
+  struct XYColorEXT
+  {
+    using NativeType = VkXYColorEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT *>( this );
+    }
+
+    explicit operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<float const &, float const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( XYColorEXT const & ) const = default;
+#else
+    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( x == rhs.x ) && ( y == rhs.y );
+# endif
+    }
+
+    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x = {};
+    float y = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value,
+                            "XYColorEXT is not nothrow_move_constructible!"
); + + struct HdrMetadataEXT + { + using NativeType = VkHdrMetadataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, + float maxLuminance_ = {}, + float minLuminance_ = {}, + float maxContentLightLevel_ = {}, + float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT + : displayPrimaryRed( displayPrimaryRed_ ) + , displayPrimaryGreen( displayPrimaryGreen_ ) + , displayPrimaryBlue( displayPrimaryBlue_ ) + , whitePoint( whitePoint_ ) + , maxLuminance( maxLuminance_ ) + , minLuminance( minLuminance_ ) + , maxContentLightLevel( maxContentLightLevel_ ) + , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + {} + + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HdrMetadataEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & + setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryRed = displayPrimaryRed_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & + setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryGreen = displayPrimaryGreen_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & + setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryBlue = displayPrimaryBlue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & + setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT + { + whitePoint = whitePoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT + { + maxLuminance = maxLuminance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT + { + minLuminance = minLuminance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxContentLightLevel = maxContentLightLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & + setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + displayPrimaryRed, + displayPrimaryGreen, + displayPrimaryBlue, + whitePoint, + maxLuminance, + minLuminance, + maxContentLightLevel, + maxFrameAverageLightLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrMetadataEXT const & ) const = default; +#else + bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && + ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && + ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) && + ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && + ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); +# endif + } + + bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HdrMetadataEXT is not nothrow_move_constructible!" 
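Illustrative usage sketch (not part of this patch): the struct above is filled with BT.2020 primaries and a D65 white point expressed as CIE xy coordinates and handed to a swapchain. It assumes the VK_EXT_hdr_metadata device extension is enabled and that the dispatcher in use has resolved vkSetHdrMetadataEXT; the luminance numbers are placeholders.

#include <vulkan/vulkan.hpp>

void SetHdrMetadata( vk::Device device, vk::SwapchainKHR swapchain )
{
  vk::HdrMetadataEXT metadata;
  metadata.setDisplayPrimaryRed( vk::XYColorEXT( 0.708f, 0.292f ) )
      .setDisplayPrimaryGreen( vk::XYColorEXT( 0.170f, 0.797f ) )
      .setDisplayPrimaryBlue( vk::XYColorEXT( 0.131f, 0.046f ) )
      .setWhitePoint( vk::XYColorEXT( 0.3127f, 0.3290f ) )
      .setMaxLuminance( 1000.0f )
      .setMinLuminance( 0.001f )
      .setMaxContentLightLevel( 1000.0f )
      .setMaxFrameAverageLightLevel( 400.0f );

  // One metadata entry per swapchain; a single pair is passed here.
  device.setHdrMetadataEXT( swapchain, metadata );
}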
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
+  {
+    using Type = HdrMetadataEXT;
+  };
+
+  struct HeadlessSurfaceCreateInfoEXT
+  {
+    using NativeType = VkHeadlessSurfaceCreateInfoEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} )
+      VULKAN_HPP_NOEXCEPT : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
+    }
+
+    explicit operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+# endif
+    }
+
+    bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+    const void *                                         pNext = {};
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT  flags = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) ==
+                              sizeof( VkHeadlessSurfaceCreateInfoEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value,
+    "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!"
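Illustrative usage sketch (not part of this patch): a headless surface is handy for display-less testing. This assumes the VK_EXT_headless_surface instance extension was enabled at instance creation and that the dispatcher can resolve vkCreateHeadlessSurfaceEXT.

#include <vulkan/vulkan.hpp>

vk::UniqueSurfaceKHR CreateHeadlessSurface( vk::Instance instance )
{
  const vk::HeadlessSurfaceCreateInfoEXT create_info{};  // flags are reserved for future use
  return instance.createHeadlessSurfaceEXTUnique( create_info );
}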
); + + template <> + struct CppType + { + using Type = HeadlessSurfaceCreateInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + struct IOSSurfaceCreateInfoMVK + { + using NativeType = VkIOSSurfaceCreateInfoMVK; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pView( pView_ ) + {} + + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & + setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pView ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); +# endif + } + + bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == + sizeof( VkIOSSurfaceCreateInfoMVK ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" 
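Illustrative usage sketch (not part of this patch), only meaningful when VK_USE_PLATFORM_IOS_MVK is defined: pView is expected to be a CAMetalLayer-backed UIView (or a CAMetalLayer). Newer MoltenVK-based code would usually prefer VK_EXT_metal_surface, so treat this as a compatibility path.

#include <vulkan/vulkan.hpp>

#if defined( VK_USE_PLATFORM_IOS_MVK )
vk::UniqueSurfaceKHR CreateIosSurface( vk::Instance instance, const void* ui_view )
{
  const vk::IOSSurfaceCreateInfoMVK create_info( {}, ui_view );
  return instance.createIOSSurfaceMVKUnique( create_info );
}
#endif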
); + + template <> + struct CppType + { + using Type = IOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + + struct ImageBlit + { + using NativeType = VkImageBlit; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & + setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & + setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit const & ) const = default; +#else + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); +# endif + } + + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), + "struct and wrapper have different size!" 
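Illustrative usage sketch (not part of this patch): a full-image colour blit with linear filtering recorded through the wrapper above. The images, sizes and layouts are assumptions; both images must already be in the transfer-src and transfer-dst layouts.

#include <array>
#include <vulkan/vulkan.hpp>

void BlitColorImage( vk::CommandBuffer cmd, vk::Image src, vk::Extent2D src_size, vk::Image dst,
                     vk::Extent2D dst_size )
{
  const vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  const std::array<vk::Offset3D, 2> src_bounds = {
    vk::Offset3D( 0, 0, 0 ),
    vk::Offset3D( static_cast<int32_t>( src_size.width ), static_cast<int32_t>( src_size.height ), 1 ) };
  const std::array<vk::Offset3D, 2> dst_bounds = {
    vk::Offset3D( 0, 0, 0 ),
    vk::Offset3D( static_cast<int32_t>( dst_size.width ), static_cast<int32_t>( dst_size.height ), 1 ) };

  vk::ImageBlit blit;
  blit.setSrcSubresource( layers ).setSrcOffsets( src_bounds ).setDstSubresource( layers ).setDstOffsets( dst_bounds );

  cmd.blitImage( src, vk::ImageLayout::eTransferSrcOptimal, dst, vk::ImageLayout::eTransferDstOptimal, blit,
                 vk::Filter::eLinear );
}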
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageBlit is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImageFormatConstraintsInfoFUCHSIA + { + using NativeType = VkImageFormatConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageFormatConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, + uint64_t sysmemPixelFormat_ = {}, + uint32_t colorSpaceCount_ = {}, + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {} ) VULKAN_HPP_NOEXCEPT + : imageCreateInfo( imageCreateInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , flags( flags_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , colorSpaceCount( colorSpaceCount_ ) + , pColorSpaces( pColorSpaces_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatConstraintsInfoFUCHSIA & + operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + imageCreateInfo = imageCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + requiredFormatFeatures = requiredFormatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT + { + sysmemPixelFormat = sysmemPixelFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT + { + colorSpaceCount = colorSpaceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT + { + pColorSpaces = pColorSpaces_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + imageCreateInfo, + requiredFormatFeatures, + flags, + sysmemPixelFormat, + colorSpaceCount, + pColorSpaces ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) && + ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && + ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( colorSpaceCount == rhs.colorSpaceCount ) && + ( pColorSpaces == rhs.pColorSpaces ); +# endif + } + + bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {}; + uint64_t sysmemPixelFormat = {}; + uint32_t colorSpaceCount = {}; + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == + sizeof( VkImageFormatConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" 
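Illustrative usage sketch (not part of this patch), only meaningful when VK_USE_PLATFORM_FUCHSIA is defined: one per-format constraints entry for the sysmem negotiation. The image create-info, the colour-space list and the chosen format feature are assumptions, and only a pointer into the caller's colour-space storage is kept.

#include <vector>
#include <vulkan/vulkan.hpp>

#if defined( VK_USE_PLATFORM_FUCHSIA )
vk::ImageFormatConstraintsInfoFUCHSIA
MakeFormatConstraints( const vk::ImageCreateInfo& image_info,
                       const std::vector<vk::SysmemColorSpaceFUCHSIA>& color_spaces )
{
  vk::ImageFormatConstraintsInfoFUCHSIA constraints;
  constraints.setImageCreateInfo( image_info )
      .setRequiredFormatFeatures( vk::FormatFeatureFlagBits::eSampledImage )
      .setColorSpaceCount( static_cast<uint32_t>( color_spaces.size() ) )
      .setPColorSpaces( color_spaces.data() );  // pointer only; the vector must stay alive
  return constraints;
}
#endif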
); + + template <> + struct CppType + { + using Type = ImageFormatConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImageConstraintsInfoFUCHSIA + { + using NativeType = VkImageConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( + uint32_t formatConstraintsCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {} ) VULKAN_HPP_NOEXCEPT + : formatConstraintsCount( formatConstraintsCount_ ) + , pFormatConstraints( pFormatConstraints_ ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {} ) + : formatConstraintsCount( static_cast( formatConstraints_.size() ) ) + , pFormatConstraints( formatConstraints_.data() ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + , flags( flags_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT + { + formatConstraintsCount = formatConstraintsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints( + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT + { + pFormatConstraints = pFormatConstraints_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA & setFormatConstraints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT + { + formatConstraintsCount = static_cast( formatConstraints_.size() ); + pFormatConstraints = formatConstraints_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints( + 
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) + VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( formatConstraintsCount == rhs.formatConstraintsCount ) && + ( pFormatConstraints == rhs.pFormatConstraints ) && + ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t formatConstraintsCount = {}; + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == + sizeof( VkImageConstraintsInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImageCopy + { + using NativeType = VkImageCopy; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & + setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & + setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy const & ) const = default; +#else + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D 
dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCopy is not nothrow_move_constructible!" ); + + struct SubresourceLayout + { + using NativeType = VkSubresourceLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , size( size_ ) + , rowPitch( rowPitch_ ) + , arrayPitch( arrayPitch_ ) + , depthPitch( depthPitch_ ) + {} + + VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceLayout( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, size, rowPitch, arrayPitch, depthPitch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout const & ) const = default; +#else + bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && + ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch ); +# endif + } + + bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout is not nothrow_move_constructible!" 
); + + struct ImageDrmFormatModifierExplicitCreateInfoEXT + { + using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , pPlaneLayouts( pPlaneLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( + ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierExplicitCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + planeLayouts_ ) + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) + , pPlaneLayouts( planeLayouts_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageDrmFormatModifierExplicitCreateInfoEXT & + operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT & + operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pPlaneLayouts = pPlaneLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + planeLayouts_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); + pPlaneLayouts = planeLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( pPlaneLayouts == rhs.pPlaneLayouts ); +# endif + } + + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; + }; + + struct ImageDrmFormatModifierListCreateInfoEXT + { + using NativeType = VkImageDrmFormatModifierListCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierListCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, + const uint64_t * pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifiers( pDrmFormatModifiers_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierListCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) + : drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ) + , pDrmFormatModifiers( drmFormatModifiers_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageDrmFormatModifierListCreateInfoEXT & + operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT & + operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & + setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & + setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); + pDrmFormatModifiers = drmFormatModifiers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); +# endif + } + + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + const void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t * pDrmFormatModifiers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == + sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierListCreateInfoEXT; + }; + + struct ImageDrmFormatModifierPropertiesEXT + { + using NativeType = VkImageDrmFormatModifierPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImageDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageDrmFormatModifierPropertiesEXT & + operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierPropertiesEXT & + operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == 
rhs.drmFormatModifier ); +# endif + } + + bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void * pNext = {}; + uint64_t drmFormatModifier = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == + sizeof( VkImageDrmFormatModifierPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierPropertiesEXT; + }; + + struct ImageFormatListCreateInfo + { + using NativeType = VkImageFormatListCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT + : viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + : viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & + setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & + setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo & setViewFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) + VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewFormatCount, pViewFormats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatListCreateInfo const & ) const = default; +#else + bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && + ( pViewFormats == rhs.pViewFormats ); +# endif + } + + bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; + const void * pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == + sizeof( VkImageFormatListCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatListCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageFormatListCreateInfo; + }; + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + + struct ImageFormatProperties2 + { + using NativeType = VkImageFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties( imageFormatProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageFormatProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties2 const & ) const = default; +#else + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); +# endif + } + + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == + sizeof( VkImageFormatProperties2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageFormatProperties2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageFormatProperties2; + }; + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct ImageMemoryBarrier + { + using NativeType = VkImageMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setNewLayout( 
VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + srcAccessMask, + dstAccessMask, + oldLayout, + newLayout, + srcQueueFamilyIndex, + dstQueueFamilyIndex, + image, + subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier const & ) const = default; +#else + bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageMemoryBarrier is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageMemoryBarrier; + }; + + struct ImageMemoryRequirementsInfo2 + { + using NativeType = VkImageMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & + setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == + sizeof( VkImageMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageMemoryRequirementsInfo2; + }; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImagePipeSurfaceCreateInfoFUCHSIA + { + using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, + zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , imagePipeHandle( imagePipeHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImagePipeSurfaceCreateInfoFUCHSIA & + operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & + setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT + { + imagePipeHandle = imagePipeHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, imagePipeHandle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; + zx_handle_t imagePipeHandle = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == + sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImagePipeSurfaceCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImagePlaneMemoryRequirementsInfo + { + using NativeType = VkImagePlaneMemoryRequirementsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImagePlaneMemoryRequirementsInfo & + operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & + setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, planeAspect ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default; +#else + bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == + sizeof( VkImagePlaneMemoryRequirementsInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImagePlaneMemoryRequirementsInfo; + }; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + + struct ImageResolve + { + using NativeType = VkImageResolve; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageResolve( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & + setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & + setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & + setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } 
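    // Illustrative usage sketch (not part of the generated header above): ImageResolve, like ImageCopy,
    // has no sType/pNext member, so it is typically filled through the chained setters shown here and
    // passed directly to CommandBuffer::resolveImage. The command buffer, images and their layouts are
    // assumed to be set up elsewhere; "ResolveMsaaColor" is a hypothetical helper name, and the sketch
    // assumes the enhanced-mode setters (VULKAN_HPP_NO_STRUCT_SETTERS not defined).
    static void ResolveMsaaColor( VULKAN_HPP_NAMESPACE::CommandBuffer cmd,
                                  VULKAN_HPP_NAMESPACE::Image         msaaColor,
                                  VULKAN_HPP_NAMESPACE::Image         resolveTarget,
                                  uint32_t                            width,
                                  uint32_t                            height )
    {
      // Both images use the first mip level of a single color layer.
      const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers colorLayer{
        VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 };

      const VULKAN_HPP_NAMESPACE::ImageResolve region = VULKAN_HPP_NAMESPACE::ImageResolve{}
                                                          .setSrcSubresource( colorLayer )
                                                          .setDstSubresource( colorLayer )
                                                          .setExtent( { width, height, 1 } );

      // Single-region ArrayProxy overload; the images must already be in the given transfer layouts.
      cmd.resolveImage( msaaColor,
                        VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
                        resolveTarget,
                        VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal,
                        region );
    }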
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve const & ) const = default; +#else + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageResolve is not nothrow_move_constructible!" 
); + + struct ImageResolve2 + { + using NativeType = VkImageResolve2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageResolve2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & + setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & + setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & + setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & + setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & + setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve2 const & ) const = default; +#else + bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && + ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( 
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value,
+                            "ImageResolve2 is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageResolve2>
+  {
+    using Type = ImageResolve2;
+  };
+  using ImageResolve2KHR = ImageResolve2;
+
+  struct ImageSparseMemoryRequirementsInfo2
+  {
+    using NativeType = VkImageSparseMemoryRequirementsInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eImageSparseMemoryRequirementsInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageSparseMemoryRequirementsInfo2 &
+      operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSparseMemoryRequirementsInfo2 &
+      operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 &
+      setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( this );
+    }
+
+    explicit operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
+#else
+    bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
+#  endif
+    }
+
+    bool operator!=(
ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == + sizeof( VkImageSparseMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageSparseMemoryRequirementsInfo2; + }; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct ImageStencilUsageCreateInfo + { + using NativeType = VkImageStencilUsageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : stencilUsage( stencilUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & + setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT + { + stencilUsage = stencilUsage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilUsage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); +# endif + } + + bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + 
public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == + sizeof( VkImageStencilUsageCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageStencilUsageCreateInfo; + }; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + + struct ImageSwapchainCreateInfoKHR + { + using NativeType = VkImageSwapchainCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & + setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); +# endif + } + + bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + }; + 
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == + sizeof( VkImageSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageSwapchainCreateInfoKHR; + }; + + struct ImageViewASTCDecodeModeEXT + { + using NativeType = VkImageViewASTCDecodeModeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( + VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT + : decodeMode( decodeMode_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewASTCDecodeModeEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & + setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT + { + decodeMode = decodeMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decodeMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; +#else + bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); +# endif + } + + bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == + sizeof( VkImageViewASTCDecodeModeEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageViewASTCDecodeModeEXT; + }; + + struct ImageViewAddressPropertiesNVX + { + using NativeType = VkImageViewAddressPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewAddressPropertiesNVX & + operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; +#else + bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && + ( size == rhs.size ); +# endif + } + + bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == + sizeof( VkImageViewAddressPropertiesNVX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageViewAddressPropertiesNVX; + }; + + struct ImageViewCreateInfo + { + using NativeType = VkImageViewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , image( image_ ) + , viewType( viewType_ ) + , format( format_ ) + , components( components_ ) + , subresourceRange( subresourceRange_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewCreateInfo const & ) const = default; +#else + bool 
operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && + ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == rhs.components ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImageViewCreateInfo; + }; + + struct ImageViewHandleInfoNVX + { + using NativeType = VkImageViewHandleInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ) + , descriptorType( descriptorType_ ) + , sampler( sampler_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & + setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & + setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = 
sampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, descriptorType, sampler ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; +#else + bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && + ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler ); +# endif + } + + bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == + sizeof( VkImageViewHandleInfoNVX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewHandleInfoNVX is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageViewHandleInfoNVX; + }; + + struct ImageViewMinLodCreateInfoEXT + { + using NativeType = VkImageViewMinLodCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {} ) VULKAN_HPP_NOEXCEPT : minLod( minLod_ ) {} + + VULKAN_HPP_CONSTEXPR + ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewMinLodCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT; + const void * pNext = {}; + float minLod = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == + sizeof( VkImageViewMinLodCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageViewMinLodCreateInfoEXT; + }; + + struct ImageViewUsageCreateInfo + { + using NativeType = VkImageViewUsageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT + : usage( usage_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & + setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == + sizeof( VkImageViewUsageCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewUsageCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImageViewUsageCreateInfo; + }; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct ImportAndroidHardwareBufferInfoANDROID + { + using NativeType = VkImportAndroidHardwareBufferInfoANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportAndroidHardwareBufferInfoANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportAndroidHardwareBufferInfoANDROID & + operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportAndroidHardwareBufferInfoANDROID & + operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & + setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + struct AHardwareBuffer * buffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == + sizeof( VkImportAndroidHardwareBufferInfoANDROID ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ImportFenceFdInfoKHR + { + using NativeType = VkImportFenceFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, flags, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; +#else + bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && + ( handleType == rhs.handleType ) && ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + 
+ public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportFenceFdInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportFenceFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportFenceWin32HandleInfoKHR + { + using NativeType = VkImportFenceWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportFenceWin32HandleInfoKHR & + operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + explicit operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, flags, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && + ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == + sizeof( VkImportFenceWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryBufferCollectionFUCHSIA + { + using NativeType = VkImportMemoryBufferCollectionFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportMemoryBufferCollectionFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {} ) VULKAN_HPP_NOEXCEPT + : collection( collection_ ) + , index( index_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryBufferCollectionFUCHSIA & + operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryBufferCollectionFUCHSIA & + operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default; +# else + bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && + ( index == rhs.index ); +# endif + } + + bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == + sizeof( 
VkImportMemoryBufferCollectionFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportMemoryBufferCollectionFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImportMemoryFdInfoKHR + { + using NativeType = VkImportMemoryFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; +#else + bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportMemoryFdInfoKHR; + }; + + struct ImportMemoryHostPointerInfoEXT + { + using NativeType = VkImportMemoryHostPointerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , pHostPointer( pHostPointer_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryHostPointerInfoEXT & + operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, pHostPointer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( pHostPointer == rhs.pHostPointer ); +# endif + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == + sizeof( VkImportMemoryHostPointerInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportMemoryHostPointerInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoKHR + { + using NativeType = VkImportMemoryWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryWin32HandleInfoKHR & + operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool 
operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == + sizeof( VkImportMemoryWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoNV + { + using NativeType = VkImportMemoryWin32HandleInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryWin32HandleInfoNV & + operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ); +# endif + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == + sizeof( VkImportMemoryWin32HandleInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryZirconHandleInfoFUCHSIA + { + using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportMemoryZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + zx_handle_t handle_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryZirconHandleInfoFUCHSIA & + operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryZirconHandleInfoFUCHSIA & + operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } 
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + zx_handle_t handle = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == + sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImportMemoryZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImportSemaphoreFdInfoKHR + { + using NativeType = VkImportSemaphoreFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; +#else + bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif 
+ + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == + sizeof( VkImportSemaphoreFdInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ImportSemaphoreFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportSemaphoreWin32HandleInfoKHR + { + using NativeType = VkImportSemaphoreWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR + ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportSemaphoreWin32HandleInfoKHR & + operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = 
name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && + ( name == rhs.name ); +# endif + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == + sizeof( VkImportSemaphoreWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportSemaphoreZirconHandleInfoFUCHSIA + { + using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + zx_handle_t zirconHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , zirconHandle( zirconHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreZirconHandleInfoFUCHSIA( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportSemaphoreZirconHandleInfoFUCHSIA & + operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreZirconHandleInfoFUCHSIA & + operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT + { + zirconHandle = zirconHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; 
cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + zx_handle_t zirconHandle = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == + sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct IndirectCommandsLayoutTokenNV + { + using NativeType = VkIndirectCommandsLayoutTokenNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT + : tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( indexTypeCount_ ) + , pIndexTypes( pIndexTypes_ ) + , pIndexTypeValues( pIndexTypeValues_ ) + {} + + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, + uint32_t stream_, + uint32_t offset_, + uint32_t vertexBindingUnit_, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, + uint32_t pushconstantOffset_, + uint32_t pushconstantSize_, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {} ) + : tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( static_cast( indexTypes_.size() ) ) + , pIndexTypes( indexTypes_.data() ) + , pIndexTypeValues( indexTypeValues_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +# else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( + 
VULKAN_HPP_NAMESPACE_STRING + "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsLayoutTokenNV & + operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + { + tokenType = tokenType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT + { + stream = stream_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexDynamicStride = vertexDynamicStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantOffset = pushconstantOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantSize = pushconstantSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = indexTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & setIndexTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) + VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypes_.size() ); + 
pIndexTypes = indexTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypeValues = pIndexTypeValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & setIndexTypeValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + tokenType, + stream, + offset, + vertexBindingUnit, + vertexDynamicStride, + pushconstantPipelineLayout, + pushconstantShaderStageFlags, + pushconstantOffset, + pushconstantSize, + indirectStateFlags, + indexTypeCount, + pIndexTypes, + pIndexTypeValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && + ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) && + ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && + ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && + ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && + ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && + ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues ); +# endif + } + + bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; + const uint32_t * pIndexTypeValues = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == + 
sizeof( VkIndirectCommandsLayoutTokenNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenNV; + }; + + struct IndirectCommandsLayoutCreateInfoNV + { + using NativeType = VkIndirectCommandsLayoutCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eIndirectCommandsLayoutCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( tokenCount_ ) + , pTokens( pTokens_ ) + , streamCount( streamCount_ ) + , pStreamStrides( pStreamStrides_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + tokens_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + , streamCount( static_cast( streamStrides_.size() ) ) + , pStreamStrides( streamStrides_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsLayoutCreateInfoNV & + operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV & + operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & setTokens( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + { + pStreamStrides = pStreamStrides_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & setStreamStrides( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streamStrides_.size() ); + pStreamStrides = streamStrides_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) && + ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && + ( pStreamStrides == rhs.pStreamStrides ); +# endif + } + + bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; + uint32_t streamCount = {}; + const uint32_t * pStreamStrides = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == + sizeof( VkIndirectCommandsLayoutCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
+    "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
+  {
+    using Type = IndirectCommandsLayoutCreateInfoNV;
+  };
+
+  struct InitializePerformanceApiInfoINTEL
+  {
+    using NativeType = VkInitializePerformanceApiInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::eInitializePerformanceApiInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    InitializePerformanceApiInfoINTEL &
+      operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
+    }
+
+    explicit operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pUserData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
+#else
+    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
+# endif
+    }
+
+    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eInitializePerformanceApiInfoINTEL;
+    const void *                        pNext     = {};
+    void *                              pUserData = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) ==
+                              sizeof( VkInitializePerformanceApiInfoINTEL ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value,
+    "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
+  {
+    using Type = InitializePerformanceApiInfoINTEL;
+  };
+
+  struct InputAttachmentAspectReference
+  {
+    using NativeType = VkInputAttachmentAspectReference;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      InputAttachmentAspectReference( uint32_t subpass_ = {},
+                                      uint32_t inputAttachmentIndex_ = {},
+                                      VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : subpass( subpass_ )
+      , inputAttachmentIndex( inputAttachmentIndex_ )
+      , aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    InputAttachmentAspectReference &
+      operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &
+      setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentIndex = inputAttachmentIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &
+      setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
+    }
+
+    explicit operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( subpass, inputAttachmentIndex, aspectMask );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( InputAttachmentAspectReference const & ) const = default;
+#else
+    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) &&
+             ( aspectMask == rhs.aspectMask );
+# endif
+    }
+
+    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                               subpass              = {};
+    uint32_t                               inputAttachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask           = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) ==
+                              sizeof( VkInputAttachmentAspectReference ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value,
+                            "struct wrapper is not a standard layout!"
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "InputAttachmentAspectReference is not nothrow_move_constructible!" ); + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + + struct InstanceCreateInfo + { + using NativeType = VkInstanceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + {} + + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : InstanceCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo( + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {} ) + : flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & + setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationInfo = pApplicationInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & + setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & setPEnabledLayerNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
pEnabledLayerNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & + setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & + setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & setPEnabledExtensionNames( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) + VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + pApplicationInfo, + enabledLayerCount, + ppEnabledLayerNames, + enabledExtensionCount, + ppEnabledExtensionNames ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; + } + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) && + [this, rhs] + { + bool equal = true; + for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i ) + { + equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || + ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) ); + } + return equal; + }() && ( enabledExtensionCount == rhs.enabledExtensionCount ) && + [this, rhs] + { + bool equal = true; + for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i ) + { + equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) || + ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) ); + } + return equal; + }(); + } + + bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "InstanceCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = InstanceCreateInfo; + }; + + struct LayerProperties + { + using NativeType = VkLayerProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + LayerProperties( std::array const & layerName_ = {}, + uint32_t specVersion_ = {}, + uint32_t implementationVersion_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : layerName( layerName_ ) + , specVersion( specVersion_ ) + , implementationVersion( implementationVersion_ ) + , description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : LayerProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( layerName, specVersion, implementationVersion, description ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LayerProperties const & ) const = default; +#else + bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && + ( implementationVersion == rhs.implementationVersion ) && ( description == rhs.description ); +# endif + } + + bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerProperties is not nothrow_move_constructible!" 
); + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + struct MacOSSurfaceCreateInfoMVK + { + using NativeType = VkMacOSSurfaceCreateInfoMVK; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pView( pView_ ) + {} + + VULKAN_HPP_CONSTEXPR + MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : MacOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & + setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pView ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); +# endif + } + + bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == + sizeof( VkMacOSSurfaceCreateInfoMVK ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MacOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + struct MappedMemoryRange + { + using NativeType = VkMappedMemoryRange; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + : MappedMemoryRange( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & + setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MappedMemoryRange const & ) const = default; +#else + bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); +# endif + } + + bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MappedMemoryRange is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MappedMemoryRange; + }; + + struct MemoryAllocateFlagsInfo + { + using NativeType = VkMemoryAllocateFlagsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, + uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryAllocateFlagsInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & + setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; +#else + bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; + uint32_t deviceMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == + sizeof( VkMemoryAllocateFlagsInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
);
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value,
+                            "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
+  {
+    using Type = MemoryAllocateFlagsInfo;
+  };
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+
+  struct MemoryAllocateInfo
+  {
+    using NativeType = VkMemoryAllocateInfo;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_  = {},
+                                             uint32_t                         memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : allocationSize( allocationSize_ )
+      , memoryTypeIndex( memoryTypeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo &
+      setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      allocationSize = allocationSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryTypeIndex = memoryTypeIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateInfo *>( this );
+    }
+
+    explicit operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateInfo *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryAllocateInfo const & ) const = default;
+#else
+    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) &&
+             ( memoryTypeIndex == rhs.memoryTypeIndex );
+#  endif
+    }
+
+    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eMemoryAllocateInfo;
+    const void *                        pNext           = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    allocationSize  = {};
+    uint32_t                            memoryTypeIndex = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value,
+                            "struct wrapper is not a standard layout!"
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryAllocateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryAllocateInfo; + }; + + struct MemoryBarrier + { + using NativeType = VkMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrier( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask, dstAccessMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryBarrier const & ) const = default; +#else + bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ); +# endif + } + + bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryBarrier is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryBarrier; + }; + + struct MemoryDedicatedAllocateInfo + { + using NativeType = VkMemoryDedicatedAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedAllocateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & + setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & + setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; +#else + bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == + sizeof( VkMemoryDedicatedAllocateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryDedicatedAllocateInfo; + }; + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + + struct MemoryDedicatedRequirements + { + using NativeType = VkMemoryDedicatedRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : prefersDedicatedAllocation( prefersDedicatedAllocation_ ) + , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedRequirements( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedRequirements const & ) const = default; +#else + bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && + ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); +# endif + } + + bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == + sizeof( VkMemoryDedicatedRequirements ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryDedicatedRequirements is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryDedicatedRequirements; + }; + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + + struct MemoryFdPropertiesKHR + { + using NativeType = VkMemoryFdPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryFdPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; +#else + bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryFdPropertiesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryFdPropertiesKHR; + }; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( + MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + VULKAN_HPP_NOEXCEPT + : MemoryGetAndroidHardwareBufferInfoANDROID( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetAndroidHardwareBufferInfoANDROID & + operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetAndroidHardwareBufferInfoANDROID & + operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == + sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryGetAndroidHardwareBufferInfoANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct MemoryGetFdInfoKHR + { + using NativeType = VkMemoryGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; +#else + bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), + "struct and wrapper have different 
size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryGetFdInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryGetFdInfoKHR; + }; + + struct MemoryGetRemoteAddressInfoNV + { + using NativeType = VkMemoryGetRemoteAddressInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetRemoteAddressInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default; +#else + bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == + sizeof( VkMemoryGetRemoteAddressInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryGetRemoteAddressInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryGetWin32HandleInfoKHR + { + using NativeType = VkMemoryGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); +# 
endif + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == + sizeof( VkMemoryGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryGetZirconHandleInfoFUCHSIA + { + using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetZirconHandleInfoFUCHSIA & + operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == + sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct MemoryHeap + { + using NativeType = VkMemoryHeap; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryHeap( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( size, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHeap const & ) const = default; +#else + bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( size == rhs.size ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryHeap is not nothrow_move_constructible!" ); + + struct MemoryHostPointerPropertiesEXT + { + using NativeType = VkMemoryHostPointerPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryHostPointerPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryHostPointerPropertiesEXT & + operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default; +#else + bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == + sizeof( VkMemoryHostPointerPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryHostPointerPropertiesEXT; + }; + + struct MemoryOpaqueCaptureAddressAllocateInfo + { + using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : opaqueCaptureAddress( opaqueCaptureAddress_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryOpaqueCaptureAddressAllocateInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryOpaqueCaptureAddressAllocateInfo & + operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryOpaqueCaptureAddressAllocateInfo & + operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & + setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; +#else + bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); +# endif + } + + bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == + sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MemoryOpaqueCaptureAddressAllocateInfo; + }; + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + + struct MemoryPriorityAllocateInfoEXT + { + using NativeType = VkMemoryPriorityAllocateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT + : priority( priority_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryPriorityAllocateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryPriorityAllocateInfoEXT & + operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT + { + priority = priority_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, priority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; +#else + bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); +# endif + } + + bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; + const void * pNext = {}; + float priority = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == + sizeof( VkMemoryPriorityAllocateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" 
);
+
+  struct MemoryRequirements
+  {
+    using NativeType = VkMemoryRequirements;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_           = {},
+                                             VULKAN_HPP_NAMESPACE::DeviceSize alignment_      = {},
+                                             uint32_t                         memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : size( size_ )
+      , alignment( alignment_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements *>( this );
+    }
+
+    explicit operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( size, alignment, memoryTypeBits );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements const & ) const = default;
+#else
+    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
+#  endif
+    }
+
+    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment      = {};
+    uint32_t                         memoryTypeBits = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value,
+                            "MemoryRequirements is not nothrow_move_constructible!"
);
+
+  struct MemoryRequirements2
+  {
+    using NativeType = VkMemoryRequirements2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
+    }
+
+    explicit operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryRequirements );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements2 const & ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
+#  endif
+    }
+
+    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType      sType              = StructureType::eMemoryRequirements2;
+    void *                                   pNext              = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value,
+                            "MemoryRequirements2 is not nothrow_move_constructible!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryRequirements2>
+  {
+    using Type = MemoryRequirements2;
+  };
+  using MemoryRequirements2KHR = MemoryRequirements2;
+
+  struct MemoryType
+  {
+    using NativeType = VkMemoryType;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
+                                     uint32_t                                  heapIndex_     = {} ) VULKAN_HPP_NOEXCEPT
+      : propertyFlags( propertyFlags_ )
+      , heapIndex( heapIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryType *>( this );
+    }
+
+    explicit operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryType *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( propertyFlags, heapIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryType const & ) const = default;
+#else
+    bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
+#  endif
+    }
+
+    bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+    uint32_t                                  heapIndex     = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value,
+                            "MemoryType is not nothrow_move_constructible!"
); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryWin32HandlePropertiesKHR + { + using NativeType = VkMemoryWin32HandlePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR + MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryWin32HandlePropertiesKHR & + operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; +# else + bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == + sizeof( VkMemoryWin32HandlePropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryWin32HandlePropertiesKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryZirconHandlePropertiesFUCHSIA + { + using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryZirconHandlePropertiesFUCHSIA & + operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA & + operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default; +# else + bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == + sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MemoryZirconHandlePropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct MetalSurfaceCreateInfoEXT + { + using NativeType = VkMetalSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pLayer( pLayer_ ) + {} + + VULKAN_HPP_CONSTEXPR + MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT + { + pLayer = pLayer_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pLayer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); +# endif + } + + bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer * pLayer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == + sizeof( VkMetalSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MetalSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct MultiDrawIndexedInfoEXT + { + using NativeType = VkMultiDrawIndexedInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, + uint32_t indexCount_ = {}, + int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : firstIndex( firstIndex_ ) + , indexCount( indexCount_ ) + , vertexOffset( vertexOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiDrawIndexedInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( firstIndex, indexCount, vertexOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default; +#else + bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && + ( vertexOffset == rhs.vertexOffset ); +# endif + } + + bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t firstIndex = {}; + uint32_t indexCount = {}; + int32_t vertexOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == + sizeof( VkMultiDrawIndexedInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" 
);
+
+  struct MultiDrawInfoEXT
+  {
+    using NativeType = VkMultiDrawInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : firstVertex( firstVertex_ )
+      , vertexCount( vertexCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultiDrawInfoEXT *>( this );
+    }
+
+    explicit operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultiDrawInfoEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( firstVertex, vertexCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MultiDrawInfoEXT const & ) const = default;
+#else
+    bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount );
+#  endif
+    }
+
+    bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t firstVertex = {};
+    uint32_t vertexCount = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
+                            "MultiDrawInfoEXT is not nothrow_move_constructible!"
);
+
+  struct MultisamplePropertiesEXT
+  {
+    using NativeType = VkMultisamplePropertiesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
+    }
+
+    explicit operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxSampleLocationGridSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
+#else
+    bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+#  endif
+    }
+
+    bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::eMultisamplePropertiesEXT;
+    void *                              pNext                     = {};
+    VULKAN_HPP_NAMESPACE::Extent2D      maxSampleLocationGridSize = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) ==
+                              sizeof( VkMultisamplePropertiesEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value,
+                            "MultisamplePropertiesEXT is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = MultisamplePropertiesEXT; + }; + + struct MultiviewPerViewAttributesInfoNVX + { + using NativeType = VkMultiviewPerViewAttributesInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMultiviewPerViewAttributesInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {} ) VULKAN_HPP_NOEXCEPT + : perViewAttributes( perViewAttributes_ ) + , perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ ) + {} + + VULKAN_HPP_CONSTEXPR + MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultiviewPerViewAttributesInfoNVX & + operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & + setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributes = perViewAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly( + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default; +#else + bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && + ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly ); +# endif + } + + bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
perViewAttributes = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == + sizeof( VkMultiviewPerViewAttributesInfoNVX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = MultiviewPerViewAttributesInfoNVX; + }; + + struct MutableDescriptorTypeListVALVE + { + using NativeType = VkMutableDescriptorTypeListVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE( + uint32_t descriptorTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorTypeCount( descriptorTypeCount_ ) + , pDescriptorTypes( pDescriptorTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR + MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeListVALVE( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListVALVE( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorTypes_ ) + : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ) + , pDescriptorTypes( descriptorTypes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MutableDescriptorTypeListVALVE & + operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & + setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = descriptorTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & + setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorTypes = pDescriptorTypes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListVALVE & setDescriptorTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + descriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = static_cast( descriptorTypes_.size() ); + pDescriptorTypes = descriptorTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( descriptorTypeCount, pDescriptorTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
MutableDescriptorTypeListVALVE const & ) const = default; +#else + bool operator==( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); +# endif + } + + bool operator!=( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t descriptorTypeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE ) == + sizeof( VkMutableDescriptorTypeListVALVE ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MutableDescriptorTypeListVALVE is not nothrow_move_constructible!" ); + + struct MutableDescriptorTypeCreateInfoVALVE + { + using NativeType = VkMutableDescriptorTypeCreateInfoVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eMutableDescriptorTypeCreateInfoVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( + uint32_t mutableDescriptorTypeListCount_ = {}, + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ = {} ) + VULKAN_HPP_NOEXCEPT + : mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ) + , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ ) + {} + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoVALVE( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mutableDescriptorTypeLists_ ) + : mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) + , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MutableDescriptorTypeCreateInfoVALVE & + operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoVALVE & + operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & + setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists( + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ ) 
VULKAN_HPP_NOEXCEPT + { + pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); + pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const & ) const = default; +#else + bool operator==( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && + ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists ); +# endif + } + + bool operator!=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE; + const void * pNext = {}; + uint32_t mutableDescriptorTypeListCount = {}; + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE ) == + sizeof( VkMutableDescriptorTypeCreateInfoVALVE ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "MutableDescriptorTypeCreateInfoVALVE is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = MutableDescriptorTypeCreateInfoVALVE; + }; + + struct PastPresentationTimingGOOGLE + { + using NativeType = VkPastPresentationTimingGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, + uint64_t earliestPresentTime_ = {}, + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ) + , desiredPresentTime( desiredPresentTime_ ) + , actualPresentTime( actualPresentTime_ ) + , earliestPresentTime( earliestPresentTime_ ) + , presentMargin( presentMargin_ ) + {} + + VULKAN_HPP_CONSTEXPR + PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; +#else + bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && + ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) && + ( presentMargin == rhs.presentMargin ); +# endif + } + + bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; + uint64_t earliestPresentTime = {}; + uint64_t presentMargin = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == + sizeof( VkPastPresentationTimingGOOGLE ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" 
); + + struct PerformanceConfigurationAcquireInfoINTEL + { + using NativeType = VkPerformanceConfigurationAcquireInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePerformanceConfigurationAcquireInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) + VULKAN_HPP_NOEXCEPT : type( type_ ) + {} + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( + PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT + : PerformanceConfigurationAcquireInfoINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceConfigurationAcquireInfoINTEL & + operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL & + operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); +# endif + } + + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == + sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PerformanceConfigurationAcquireInfoINTEL; + }; + + struct PerformanceCounterDescriptionKHR + { + using NativeType = VkPerformanceCounterDescriptionKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, + std::array const & name_ = {}, + std::array const & category_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , name( name_ ) + , category( category_ ) + , description( description_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceCounterDescriptionKHR & + operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, name, category, description ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; +#else + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && + ( category == rhs.category ) && ( description == rhs.description ); +# endif + } + + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == + sizeof( VkPerformanceCounterDescriptionKHR ), + "struct and 
wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PerformanceCounterDescriptionKHR; + }; + + struct PerformanceCounterKHR + { + using NativeType = VkPerformanceCounterKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, + std::array const & uuid_ = {} ) VULKAN_HPP_NOEXCEPT + : unit( unit_ ) + , scope( scope_ ) + , storage( storage_ ) + , uuid( uuid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, unit, scope, storage, uuid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterKHR const & ) const = default; +#else + bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && + ( storage == rhs.storage ) && ( uuid == rhs.uuid ); +# endif + } + + bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( 
VkPerformanceCounterKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceCounterKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PerformanceCounterKHR; + }; + + union PerformanceCounterResultKHR + { + using NativeType = VkPerformanceCounterResultKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT + { + int64 = int64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT + { + uint64 = uint64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT + { + float64 = float64_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkPerformanceCounterResultKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterResultKHR &() + { + return *reinterpret_cast( this ); + } + + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; + }; + + struct PerformanceMarkerInfoINTEL + { + using NativeType = VkPerformanceMarkerInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT : marker( marker_ ) {} + + VULKAN_HPP_CONSTEXPR + PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this ); + } + + explicit operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; + const void * pNext = {}; + uint64_t marker = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == + sizeof( VkPerformanceMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, + "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PerformanceMarkerInfoINTEL; + }; + + struct PerformanceOverrideInfoINTEL + { + using NativeType = VkPerformanceOverrideInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, + VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, + uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , enable( enable_ ) + , parameter( parameter_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & + setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + { + enable = enable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT + { + parameter = parameter_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, enable, parameter ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && + ( parameter == rhs.parameter ); +# endif + } + + bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + 
VULKAN_HPP_NAMESPACE::Bool32 enable = {}; + uint64_t parameter = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == + sizeof( VkPerformanceOverrideInfoINTEL ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PerformanceOverrideInfoINTEL; + }; + + struct PerformanceQuerySubmitInfoKHR + { + using NativeType = VkPerformanceQuerySubmitInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : counterPassIndex( counterPassIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceQuerySubmitInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceQuerySubmitInfoKHR & + operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & + setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT + { + counterPassIndex = counterPassIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, counterPassIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; +#else + bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex ); +# endif + } + + bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; + const void * pNext = {}; + uint32_t counterPassIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == + sizeof( VkPerformanceQuerySubmitInfoKHR ), + "struct and 
wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PerformanceQuerySubmitInfoKHR; + }; + + struct PerformanceStreamMarkerInfoINTEL + { + using NativeType = VkPerformanceStreamMarkerInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT + : marker( marker_ ) + {} + + VULKAN_HPP_CONSTEXPR + PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceStreamMarkerInfoINTEL & + operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void * pNext = {}; + uint32_t marker = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == + sizeof( VkPerformanceStreamMarkerInfoINTEL ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PerformanceStreamMarkerInfoINTEL; + }; + + union PerformanceValueDataINTEL + { + using NativeType = VkPerformanceValueDataINTEL; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT + { + value32 = value32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT + { + value64 = value64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT + { + valueFloat = valueFloat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & + setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT + { + valueBool = valueBool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT + { + valueString = valueString_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkPerformanceValueDataINTEL const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueDataINTEL &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + uint32_t value32; + uint64_t value64; + float valueFloat; + VULKAN_HPP_NAMESPACE::Bool32 valueBool; + const char * valueString; +#else + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char * valueString; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PerformanceValueINTEL + { + using NativeType = VkPerformanceValueINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( + VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , data( data_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PerformanceValueINTEL & + setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceValueINTEL is not nothrow_move_constructible!" ); + + struct PhysicalDevice16BitStorageFeatures + { + using NativeType = VkPhysicalDevice16BitStorageFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevice16BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) + , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) + , storagePushConstant16( storagePushConstant16_ ) + , storageInputOutput16( storageInputOutput16_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice16BitStorageFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevice16BitStorageFeatures & + operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice16BitStorageFeatures & + operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer16BitAccess = 
uniformAndStorageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + storageBuffer16BitAccess, + uniformAndStorageBuffer16BitAccess, + storagePushConstant16, + storageInputOutput16 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ); +# endif + } + + bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == + sizeof( VkPhysicalDevice16BitStorageFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevice16BitStorageFeatures; + }; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + struct PhysicalDevice4444FormatsFeaturesEXT + { + using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {} ) VULKAN_HPP_NOEXCEPT + : formatA4R4G4B4( formatA4R4G4B4_ ) + , formatA4B4G4R4( formatA4B4G4R4_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevice4444FormatsFeaturesEXT & + operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT & + operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & + setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4R4G4B4 = formatA4R4G4B4_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & + setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4B4G4R4 = formatA4B4G4R4_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && + ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); +# endif + } + + bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == + sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice4444FormatsFeaturesEXT; + }; + + struct PhysicalDevice8BitStorageFeatures + { + using NativeType = VkPhysicalDevice8BitStorageFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevice8BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT + : storageBuffer8BitAccess( storageBuffer8BitAccess_ ) + , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) + , storagePushConstant8( storagePushConstant8_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice8BitStorageFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevice8BitStorageFeatures & + operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( storagePushConstant8 == rhs.storagePushConstant8 ); +# endif + } + + bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == + sizeof( VkPhysicalDevice8BitStorageFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevice8BitStorageFeatures; + }; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + + struct PhysicalDeviceASTCDecodeFeaturesEXT + { + using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT + : decodeModeSharedExponent( decodeModeSharedExponent_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT & + operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & + setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT + { + decodeModeSharedExponent = decodeModeSharedExponent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decodeModeSharedExponent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); +# endif + } + + bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == + sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceASTCDecodeFeaturesEXT; + }; + + struct PhysicalDeviceAccelerationStructureFeaturesKHR + { + using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + , accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ) + , accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ) + , accelerationStructureHostCommands( accelerationStructureHostCommands_ ) + , descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( + PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructureFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceAccelerationStructureFeaturesKHR & + operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR & + operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureHostCommands = accelerationStructureHostCommands_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceAccelerationStructureFeaturesKHR & + setDescriptorBindingAccelerationStructureUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + accelerationStructure, + accelerationStructureCaptureReplay, + accelerationStructureIndirectBuild, + accelerationStructureHostCommands, + descriptorBindingAccelerationStructureUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) && + ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) && + ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) && + ( descriptorBindingAccelerationStructureUpdateAfterBind == + rhs.descriptorBindingAccelerationStructureUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; + }; + + struct PhysicalDeviceAccelerationStructurePropertiesKHR + { + using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, + uint32_t minAccelerationStructureScratchOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxPrimitiveCount( maxPrimitiveCount_ ) + , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ) + , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( + maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ) + , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( + PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructurePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceAccelerationStructurePropertiesKHR & + operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR & + operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGeometryCount, + maxInstanceCount, + maxPrimitiveCount, + maxPerStageDescriptorAccelerationStructures, + maxPerStageDescriptorUpdateAfterBindAccelerationStructures, + maxDescriptorSetAccelerationStructures, + maxDescriptorSetUpdateAfterBindAccelerationStructures, + minAccelerationStructureScratchOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && + ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && + ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && + ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == + rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && + ( maxDescriptorSetUpdateAfterBindAccelerationStructures == + rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && + ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + void * pNext = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxPerStageDescriptorAccelerationStructures = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; + uint32_t minAccelerationStructureScratchOffsetAlignment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; + }; + + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, advancedBlendCoherentOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); +# endif + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + }; + + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + uint32_t advancedBlendMaxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) + , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) + , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) + , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ) + , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ) + , advancedBlendAllOperations( advancedBlendAllOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + advancedBlendMaxColorAttachments, + advancedBlendIndependentBlend, + advancedBlendNonPremultipliedSrcColor, + advancedBlendNonPremultipliedDstColor, + advancedBlendCorrelatedOverlap, + advancedBlendAllOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && + ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && + ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && + ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && + ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && + ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); +# endif + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + void * pNext = {}; + uint32_t advancedBlendMaxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == + sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + }; + + struct PhysicalDeviceBorderColorSwizzleFeaturesEXT + { + using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {} ) VULKAN_HPP_NOEXCEPT + : borderColorSwizzle( borderColorSwizzle_ ) + , borderColorSwizzleFromImage( borderColorSwizzleFromImage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( + PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBorderColorSwizzleFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceBorderColorSwizzleFeaturesEXT & + operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBorderColorSwizzleFeaturesEXT & + operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & + setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + borderColorSwizzle = borderColorSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & + setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT + { + borderColorSwizzleFromImage = borderColorSwizzleFromImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) && + ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage ); +# endif + } + + bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) == + sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; + }; + + struct PhysicalDeviceBufferDeviceAddressFeatures + { + using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( + PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceBufferDeviceAddressFeatures & + operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures & + operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie(
+        sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
+             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
+             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) ==
+                              sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
+    "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeatures; + }; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT + { + using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( + PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & + operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); +# endif + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == + sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + }; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + struct PhysicalDeviceCoherentMemoryFeaturesAMD + { + using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceCoherentMemory( deviceCoherentMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoherentMemoryFeaturesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCoherentMemoryFeaturesAMD & + operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD & + operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & + setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT + { + deviceCoherentMemory = deviceCoherentMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceCoherentMemory );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) ==
+                              sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+    "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; + }; + + struct PhysicalDeviceColorWriteEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {} ) VULKAN_HPP_NOEXCEPT : colorWriteEnable( colorWriteEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( + PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceColorWriteEnableFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceColorWriteEnableFeaturesEXT & + operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceColorWriteEnableFeaturesEXT & + operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & + setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteEnable = colorWriteEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorWriteEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); +# endif + } + + bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == + sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + }; + + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT + : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) + , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( + PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & + operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupQuads = computeDerivativeGroupQuads_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && + ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); +# endif + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + }; + + struct PhysicalDeviceConditionalRenderingFeaturesEXT + { + using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT + : conditionalRendering( conditionalRendering_ ) + , inheritedConditionalRendering( inheritedConditionalRendering_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( + PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConditionalRenderingFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT & + operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRendering = conditionalRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); +# endif + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) == + sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveOverestimationSize( primitiveOverestimationSize_ ) + , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) + , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) + , primitiveUnderestimation( primitiveUnderestimation_ ) + , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) + , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) + , degenerateLinesRasterized( degenerateLinesRasterized_ ) + , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) + , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( + PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT( + VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConservativeRasterizationPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + primitiveOverestimationSize, + maxExtraPrimitiveOverestimationSize, + extraPrimitiveOverestimationSizeGranularity, + primitiveUnderestimation, + conservativePointAndLineRasterization, + degenerateTrianglesRasterized, + degenerateLinesRasterized, + fullyCoveredFragmentShaderInputVariable, + 
conservativeRasterizationPostDepthCoverage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && + ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && + ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && + ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && + ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && + ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && + ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && + ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && + ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); +# endif + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + void * pNext = {}; + float primitiveOverestimationSize = {}; + float maxExtraPrimitiveOverestimationSize = {}; + float extraPrimitiveOverestimationSizeGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, + "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : cooperativeMatrix( cooperativeMatrix_ ) + , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( + PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV & + operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT + : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( + PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV & + operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + + struct PhysicalDeviceCornerSampledImageFeaturesNV + { + using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT + : cornerSampledImage( cornerSampledImage_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( + PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCornerSampledImageFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV & + operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + cornerSampledImage = cornerSampledImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cornerSampledImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
cornerSampledImage == rhs.cornerSampledImage ); +# endif + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == + sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + }; + + struct PhysicalDeviceCoverageReductionModeFeaturesNV + { + using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT + : coverageReductionMode( coverageReductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( + PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoverageReductionModeFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV & + operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; +#else + bool 
operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); +# endif + } + + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) == + sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {} ) VULKAN_HPP_NOEXCEPT + : customBorderColors( customBorderColors_ ) + , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( + PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorFeaturesEXT & + operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT & + operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) &&
+             ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) ==
+                              sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
+    "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( + PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorPropertiesEXT & + operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT & + operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCustomBorderColorSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void * pNext = {}; + uint32_t maxCustomBorderColorSamplers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) == + sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT + : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( + &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) + VULKAN_HPP_NOEXCEPT + { + dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocationImageAliasing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); +# endif + } + + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = + 
StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == + sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, + "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + + struct PhysicalDeviceDepthClipControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {} ) VULKAN_HPP_NOEXCEPT : depthClipControl( depthClipControl_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( + PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipControlFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipControlFeaturesEXT & + operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipControlFeaturesEXT & + operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & + setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClipControl = depthClipControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == + sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipControlFeaturesEXT; + }; + + struct PhysicalDeviceDepthClipEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : depthClipEnable( depthClipEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( + PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipEnableFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT & + operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( 
__GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == + sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + + struct PhysicalDeviceDepthStencilResolveProperties + { + using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( + PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthStencilResolveProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthStencilResolveProperties & + operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties & + operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + supportedDepthResolveModes, + supportedStencilResolveModes, + independentResolveNone, + independentResolve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceDepthStencilResolveProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); +# endif + } + + bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) == + sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + + struct PhysicalDeviceDescriptorIndexingFeatures + { + using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( + PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingFeatures & + operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures & + operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = 
shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & 
setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderInputAttachmentArrayDynamicIndexing, + shaderUniformTexelBufferArrayDynamicIndexing, + shaderStorageTexelBufferArrayDynamicIndexing, + 
shaderUniformBufferArrayNonUniformIndexing, + shaderSampledImageArrayNonUniformIndexing, + shaderStorageBufferArrayNonUniformIndexing, + shaderStorageImageArrayNonUniformIndexing, + shaderInputAttachmentArrayNonUniformIndexing, + shaderUniformTexelBufferArrayNonUniformIndexing, + shaderStorageTexelBufferArrayNonUniformIndexing, + descriptorBindingUniformBufferUpdateAfterBind, + descriptorBindingSampledImageUpdateAfterBind, + descriptorBindingStorageImageUpdateAfterBind, + descriptorBindingStorageBufferUpdateAfterBind, + descriptorBindingUniformTexelBufferUpdateAfterBind, + descriptorBindingStorageTexelBufferUpdateAfterBind, + descriptorBindingUpdateUnusedWhilePending, + descriptorBindingPartiallyBound, + descriptorBindingVariableDescriptorCount, + runtimeDescriptorArray ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == + sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingFeatures; + }; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + + struct PhysicalDeviceDescriptorIndexingProperties + { + using NativeType = VkPhysicalDeviceDescriptorIndexingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDescriptorIndexingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , 
maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( + PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingProperties & + operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties & + operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; +#else 
+ bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == + sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceGeneratedCommands( deviceGeneratedCommands_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + 
setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCommands ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) + , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) + , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) + , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) + , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) + , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) + , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) + , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) + , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGraphicsShaderGroupCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + maxIndirectCommandsStreamCount, + maxIndirectCommandsTokenOffset, + maxIndirectCommandsStreamStride, + minSequencesCountBufferOffsetAlignment, + minSequencesIndexBufferOffsetAlignment, + minIndirectCommandsBufferOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; +#else + bool operator==( 
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMemoryReport( deviceMemoryReport_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( + PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceMemoryReportFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & + operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & + operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & + setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + { + deviceMemoryReport = deviceMemoryReport_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMemoryReport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + }; + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT + : diagnosticsConfig( diagnosticsConfig_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( + PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiagnosticsConfigFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDiagnosticsConfigFeaturesNV & + operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV & + operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & + setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, diagnosticsConfig ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); +# endif + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == + sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : maxDiscardRectangles( maxDiscardRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( + PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiscardRectanglePropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT & + operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDiscardRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); +# endif + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) == + sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; + }; + + struct PhysicalDeviceDriverProperties + { + using NativeType = VkPhysicalDeviceDriverProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( + VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : driverID( driverID_ ) + , driverName( driverName_ ) + , driverInfo( driverInfo_ ) + , conformanceVersion( conformanceVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDriverProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDriverProperties & + operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ConformanceVersion const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && + ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) && + ( conformanceVersion == rhs.conformanceVersion ); +# endif + } + + bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == + sizeof( 
VkPhysicalDeviceDriverProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + + struct PhysicalDeviceDrmPropertiesEXT + { + using NativeType = VkPhysicalDeviceDrmPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, + int64_t primaryMajor_ = {}, + int64_t primaryMinor_ = {}, + int64_t renderMajor_ = {}, + int64_t renderMinor_ = {} ) VULKAN_HPP_NOEXCEPT + : hasPrimary( hasPrimary_ ) + , hasRender( hasRender_ ) + , primaryMajor( primaryMajor_ ) + , primaryMinor( primaryMinor_ ) + , renderMajor( renderMajor_ ) + , renderMinor( renderMinor_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDrmPropertiesEXT & + operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && + ( hasRender == rhs.hasRender ) && ( primaryMajor == rhs.primaryMajor ) && + ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) && + ( renderMinor == rhs.renderMinor ); +# endif + } + + bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasRender = {}; + int64_t primaryMajor = {}; + int64_t 
primaryMinor = {}; + int64_t renderMajor = {}; + int64_t renderMinor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == + sizeof( VkPhysicalDeviceDrmPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDrmPropertiesEXT; + }; + + struct PhysicalDeviceDynamicRenderingFeatures + { + using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceDynamicRenderingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {} ) VULKAN_HPP_NOEXCEPT + : dynamicRendering( dynamicRendering_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingFeatures & + operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingFeatures & + operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & + setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRendering = dynamicRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceDynamicRenderingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == + sizeof( VkPhysicalDeviceDynamicRenderingFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingFeatures; + }; + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + + struct PhysicalDeviceExclusiveScissorFeaturesNV + { + using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) + VULKAN_HPP_NOEXCEPT : exclusiveScissor( exclusiveScissor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( + PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExclusiveScissorFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExclusiveScissorFeaturesNV & + operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV & + operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & + setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissor = exclusiveScissor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exclusiveScissor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); +# endif + } + + bool 
operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + }; + + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT + : extendedDynamicState2( extendedDynamicState2_ ) + , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ) + , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( + PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState2FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & + operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2 = extendedDynamicState2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + 
extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && + ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && + ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; + }; + + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT + : extendedDynamicState( extendedDynamicState_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( + PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & + operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & + setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState = extendedDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), + "struct and wrapper have 
different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; + }; + + struct PhysicalDeviceExternalBufferInfo + { + using NativeType = VkPhysicalDeviceExternalBufferInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , usage( usage_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalBufferInfo & + operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags 
) && ( usage == rhs.usage ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == + sizeof( VkPhysicalDeviceExternalBufferInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalBufferInfo; + }; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + struct PhysicalDeviceExternalFenceInfo + { + using NativeType = VkPhysicalDeviceExternalFenceInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalFenceInfo & + operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; +#else + bool 
operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == + sizeof( VkPhysicalDeviceExternalFenceInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFenceInfo; + }; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + + struct PhysicalDeviceExternalImageFormatInfo + { + using NativeType = VkPhysicalDeviceExternalImageFormatInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalImageFormatInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalImageFormatInfo & + operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo & + operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == + sizeof( VkPhysicalDeviceExternalImageFormatInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalImageFormatInfo; + }; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( + PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryHostPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryHostPropertiesEXT & + operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT & + operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minImportedHostPointerAlignment ); + } 
+#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) == + sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + }; + + struct PhysicalDeviceExternalMemoryRDMAFeaturesNV + { + using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryRDMA( externalMemoryRDMA_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( + PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryRDMAFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & + operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & + operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & + setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT + { + externalMemoryRDMA = externalMemoryRDMA_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryRDMA ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == + sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; + }; + + struct PhysicalDeviceExternalSemaphoreInfo + { + using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceExternalSemaphoreInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalSemaphoreInfo & + operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo & + operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == + sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalSemaphoreInfo; + }; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct PhysicalDeviceFeatures2 + { + using NativeType = VkPhysicalDeviceFeatures2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT + : features( features_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & + setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, features ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == + sizeof( VkPhysicalDeviceFeatures2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFeatures2; + }; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + struct PhysicalDeviceFloatControlsProperties + { + using NativeType = VkPhysicalDeviceFloatControlsProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT + : denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( 
shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFloatControlsProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFloatControlsProperties & + operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties & + operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( 
shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
+             ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) &&
+             ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
+             ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) &&
+             ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
+             ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
+             ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) &&
+             ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
+             ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) &&
+             ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
+             ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) &&
+             ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
+             ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence =
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence =
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) ==
+                              sizeof( VkPhysicalDeviceFloatControlsProperties ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
+    "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
+  {
+    using Type = PhysicalDeviceFloatControlsProperties;
+  };
+  using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
+
+  struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(
+      PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMap2FeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT &
+      operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMap2FeaturesEXT &
+      operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT &
+      setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentDensityMapDeferred );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
+    "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {},
+      uint32_t maxSubsampledArrayLayers_ = {},
+      uint32_t maxDescriptorSetSubsampledSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
+      : subsampledLoads( subsampledLoads_ )
+      , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ )
+      , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ )
+      , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(
+      PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceFragmentDensityMap2PropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT &
+      operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceFragmentDensityMap2PropertiesEXT &
+      operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       subsampledLoads,
+                       subsampledCoarseReconstructionEarlyAccess,
+                       maxSubsampledArrayLayers,
+                       maxDescriptorSetSubsampledSamplers );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) &&
+             ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) &&
+             ( 
maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && + ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t maxDescriptorSetSubsampledSamplers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMap( fragmentDensityMap_ ) + , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) + , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( + PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + { + 
fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && + ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && + ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMapOffset( fragmentDensityMapOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( + VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapOffset = fragmentDensityMapOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) == + sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + }; + + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( + VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( + VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & + operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & + operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {}; + }; + 
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) == + sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value, + "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + }; + + struct PhysicalDeviceFragmentDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT + : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) + , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) + , fragmentDensityInvocations( fragmentDensityInvocations_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( + PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT & + operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && + ( fragmentDensityInvocations == 
rhs.fragmentDensityInvocations ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderBarycentric ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderBarycentricFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + }; + + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) + , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) + , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentShadingRateEnums( fragmentShadingRateEnums_ ) + , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ) + , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShadingRateEnums = fragmentShadingRateEnums_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && + ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && + ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT + : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + setMaxFragmentShadingRateInvocationCount( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT + { + maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + }; + + struct PhysicalDeviceFragmentShadingRateFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ) + , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ) + , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( + PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateFeaturesKHR & + operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR & + operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + pipelineFragmentShadingRate = pipelineFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRate = primitiveFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFragmentShadingRate = attachmentFragmentShadingRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR 
const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && + ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && + ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + }; + + struct PhysicalDeviceFragmentShadingRateKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRateKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleCounts( sampleCounts_ ) + , fragmentSize( fragmentSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateKHR & + operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR & + operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleCounts, fragmentSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceFragmentShadingRateKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && + ( fragmentSize == rhs.fragmentSize ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateKHR; + }; + + struct PhysicalDeviceFragmentShadingRatePropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, + uint32_t maxFragmentSizeAspectRatio_ = {}, + uint32_t maxFragmentShadingRateCoverageSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {} ) VULKAN_HPP_NOEXCEPT + : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ) + , primitiveFragmentShadingRateWithMultipleViewports( 
primitiveFragmentShadingRateWithMultipleViewports_ ) + , layeredShadingRateAttachments( layeredShadingRateAttachments_ ) + , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ) + , maxFragmentSize( maxFragmentSize_ ) + , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ) + , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ) + , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ) + , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ) + , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ) + , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ) + , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ) + , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ) + , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ) + , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRatePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRatePropertiesKHR & + operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRatePropertiesKHR & + operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSizeAspectRatio, + primitiveFragmentShadingRateWithMultipleViewports, + layeredShadingRateAttachments, + fragmentShadingRateNonTrivialCombinerOps, + maxFragmentSize, + maxFragmentSizeAspectRatio, + maxFragmentShadingRateCoverageSamples, + maxFragmentShadingRateRasterizationSamples, + fragmentShadingRateWithShaderDepthStencilWrites, + fragmentShadingRateWithSampleMask, + fragmentShadingRateWithShaderSampleMask, + fragmentShadingRateWithConservativeRasterization, + fragmentShadingRateWithFragmentShaderInterlock, + fragmentShadingRateWithCustomSampleLocations, + fragmentShadingRateStrictMultiplyCombiner ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() 
== rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == + rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && + ( primitiveFragmentShadingRateWithMultipleViewports == + rhs.primitiveFragmentShadingRateWithMultipleViewports ) && + ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && + ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && + ( maxFragmentSize == rhs.maxFragmentSize ) && + ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && + ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && + ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && + ( fragmentShadingRateWithShaderDepthStencilWrites == + rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && + ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && + ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && + ( fragmentShadingRateWithConservativeRasterization == + rhs.fragmentShadingRateWithConservativeRasterization ) && + ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && + ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && + ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; + uint32_t maxFragmentSizeAspectRatio = {}; + uint32_t maxFragmentShadingRateCoverageSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = + VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) == + sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), + "struct 
and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + }; + + struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR + { + using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {} ) VULKAN_HPP_NOEXCEPT + : globalPriorityQuery( globalPriorityQuery_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( + PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceGlobalPriorityQueryFeaturesKHR & + operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeaturesKHR & + operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & + setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery = globalPriorityQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, globalPriorityQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ); +# endif + } + + bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + void * pNext 
= {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) == + sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + }; + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + + struct PhysicalDeviceGroupProperties + { + using NativeType = VkPhysicalDeviceGroupProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( + uint32_t physicalDeviceCount_ = {}, + std::array const & physicalDevices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , physicalDevices( physicalDevices_ ) + , subsetAllocation( subsetAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceGroupProperties & + operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple< + VULKAN_HPP_NAMESPACE::StructureType const &, + void * const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); +# endif + } + + bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceGroupProperties; + void * pNext = {}; + uint32_t physicalDeviceCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D + physicalDevices = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == + sizeof( VkPhysicalDeviceGroupProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceGroupProperties; + }; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + + struct PhysicalDeviceHostQueryResetFeatures + { + using NativeType = VkPhysicalDeviceHostQueryResetFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceHostQueryResetFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT + : hostQueryReset( hostQueryReset_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHostQueryResetFeatures & + operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures & + operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostQueryReset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); +# endif + } + + bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == + sizeof( VkPhysicalDeviceHostQueryResetFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + + struct PhysicalDeviceIDProperties + { + using NativeType = VkPhysicalDeviceIDProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ) + , driverUUID( driverUUID_ ) + , deviceLUID( deviceLUID_ ) + , deviceNodeMask( deviceNodeMask_ ) + , deviceLUIDValid( deviceLUIDValid_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == 
rhs.deviceLUIDValid ); +# endif + } + + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == + sizeof( VkPhysicalDeviceIDProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceIDProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( + PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageDrmFormatModifierInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) + : drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT & + operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT 
& + setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & + setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) == + sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + struct PhysicalDeviceImageFormatInfo2 + { + using NativeType = VkPhysicalDeviceImageFormatInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , type( type_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageFormatInfo2 & + operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & + setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, tiling, usage, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) 
+ return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == + sizeof( VkPhysicalDeviceImageFormatInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageFormatInfo2; + }; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + struct PhysicalDeviceImageRobustnessFeatures + { + using NativeType = VkPhysicalDeviceImageRobustnessFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageRobustnessFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : robustImageAccess( robustImageAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageRobustnessFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageRobustnessFeatures & + operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeatures & + operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & + setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustImageAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); +# endif + } + + bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == + sizeof( VkPhysicalDeviceImageRobustnessFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeatures; + }; + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + + struct PhysicalDeviceImageViewImageFormatInfoEXT + { + using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = + VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT + : imageViewType( imageViewType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( + PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewImageFormatInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT & + operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + { + imageViewType = imageViewType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageViewType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == + sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + + struct PhysicalDeviceImageViewMinLodFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {} ) VULKAN_HPP_NOEXCEPT + : minLod( minLod_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( + PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewMinLodFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageViewMinLodFeaturesEXT & + operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewMinLodFeaturesEXT & + operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & + setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 minLod = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; + }; + + struct PhysicalDeviceImagelessFramebufferFeatures + { + using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : imagelessFramebuffer( imagelessFramebuffer_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( + PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImagelessFramebufferFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImagelessFramebufferFeatures & + operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures & + operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + 
imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imagelessFramebuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); +# endif + } + + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == + sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + + struct PhysicalDeviceIndexTypeUint8FeaturesEXT + { + using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT + : indexTypeUint8( indexTypeUint8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT & + operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & + setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexTypeUint8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); +# endif + } + + bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == + sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), + "struct and wrapper have different size!" 
);
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
+    "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
+  {
+    using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
+  };
+
+  struct PhysicalDeviceInheritedViewportScissorFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(
+      VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {} ) VULKAN_HPP_NOEXCEPT
+      : inheritedViewportScissor2D( inheritedViewportScissor2D_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(
+      PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInheritedViewportScissorFeaturesNV(
+          *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV &
+      operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV &
+      operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
+      setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
+      setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedViewportScissor2D = inheritedViewportScissor2D_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, inheritedViewportScissor2D );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) == + sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; + }; + + struct PhysicalDeviceInlineUniformBlockFeatures + { + using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT + : inlineUniformBlock( inlineUniformBlock_ ) + , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( + PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockFeatures & + operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeatures & + operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & + setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == + rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == + sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeatures; + }; + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + + struct PhysicalDeviceInlineUniformBlockProperties + { + using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInlineUniformBlockProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( + uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT + : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) + , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( + PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockProperties & + operator=( 
PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockProperties & + operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == + rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == + rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + void * pNext = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == + sizeof( VkPhysicalDeviceInlineUniformBlockProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockProperties; + }; + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {} ) + VULKAN_HPP_NOEXCEPT : invocationMask( invocationMask_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( + PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInvocationMaskFeaturesHUAWEI( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & + operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & + operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & + setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT + { + invocationMask = invocationMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, invocationMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask ); +# endif + } + + bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == + sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; + }; + + struct PhysicalDeviceLimits + { + using NativeType = VkPhysicalDeviceLimits; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = {}, + uint32_t maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + uint32_t maxComputeSharedMemorySize_ = {}, + std::array const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array 
const & maxViewportDimensions_ = {}, + std::array const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array const & pointSizeRange_ = {}, + std::array const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxImageDimension1D( maxImageDimension1D_ ) + , maxImageDimension2D( maxImageDimension2D_ ) + , maxImageDimension3D( maxImageDimension3D_ ) + , maxImageDimensionCube( maxImageDimensionCube_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , maxTexelBufferElements( maxTexelBufferElements_ ) + , maxUniformBufferRange( maxUniformBufferRange_ ) + , maxStorageBufferRange( maxStorageBufferRange_ ) + , maxPushConstantsSize( maxPushConstantsSize_ ) + , maxMemoryAllocationCount( maxMemoryAllocationCount_ ) + , maxSamplerAllocationCount( maxSamplerAllocationCount_ ) + , bufferImageGranularity( bufferImageGranularity_ ) + , sparseAddressSpaceSize( sparseAddressSpaceSize_ ) + , maxBoundDescriptorSets( maxBoundDescriptorSets_ ) + , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ) + , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ) + , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ) + , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ) + , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ) + , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ) + , maxPerStageResources( 
maxPerStageResources_ ) + , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ) + , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ) + , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ) + , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ) + , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ) + , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ) + , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ) + , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ) + , maxVertexInputAttributes( maxVertexInputAttributes_ ) + , maxVertexInputBindings( maxVertexInputBindings_ ) + , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ) + , maxVertexInputBindingStride( maxVertexInputBindingStride_ ) + , maxVertexOutputComponents( maxVertexOutputComponents_ ) + , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ) + , maxTessellationPatchSize( maxTessellationPatchSize_ ) + , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ) + , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ) + , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ) + , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ) + , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ) + , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ) + , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ) + , maxGeometryInputComponents( maxGeometryInputComponents_ ) + , maxGeometryOutputComponents( maxGeometryOutputComponents_ ) + , maxGeometryOutputVertices( maxGeometryOutputVertices_ ) + , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ) + , maxFragmentInputComponents( maxFragmentInputComponents_ ) + , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ) + , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ) + , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ) + , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ) + , maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ) + , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ) + , maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ) + , subPixelPrecisionBits( subPixelPrecisionBits_ ) + , subTexelPrecisionBits( subTexelPrecisionBits_ ) + , mipmapPrecisionBits( mipmapPrecisionBits_ ) + , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ) + , maxDrawIndirectCount( maxDrawIndirectCount_ ) + , maxSamplerLodBias( maxSamplerLodBias_ ) + , maxSamplerAnisotropy( maxSamplerAnisotropy_ ) + , maxViewports( maxViewports_ ) + , maxViewportDimensions( maxViewportDimensions_ ) + , viewportBoundsRange( viewportBoundsRange_ ) + , viewportSubPixelBits( viewportSubPixelBits_ ) + , minMemoryMapAlignment( minMemoryMapAlignment_ ) + , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ) + , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ) + , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ) + , minTexelOffset( minTexelOffset_ ) + , maxTexelOffset( maxTexelOffset_ ) + , minTexelGatherOffset( minTexelGatherOffset_ ) + , maxTexelGatherOffset( maxTexelGatherOffset_ ) + , minInterpolationOffset( minInterpolationOffset_ ) + , maxInterpolationOffset( maxInterpolationOffset_ ) + , 
subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ) + , maxFramebufferWidth( maxFramebufferWidth_ ) + , maxFramebufferHeight( maxFramebufferHeight_ ) + , maxFramebufferLayers( maxFramebufferLayers_ ) + , framebufferColorSampleCounts( framebufferColorSampleCounts_ ) + , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ) + , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ) + , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ) + , maxColorAttachments( maxColorAttachments_ ) + , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ) + , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ) + , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ) + , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ) + , storageImageSampleCounts( storageImageSampleCounts_ ) + , maxSampleMaskWords( maxSampleMaskWords_ ) + , timestampComputeAndGraphics( timestampComputeAndGraphics_ ) + , timestampPeriod( timestampPeriod_ ) + , maxClipDistances( maxClipDistances_ ) + , maxCullDistances( maxCullDistances_ ) + , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ) + , discreteQueuePriorities( discreteQueuePriorities_ ) + , pointSizeRange( pointSizeRange_ ) + , lineWidthRange( lineWidthRange_ ) + , pointSizeGranularity( pointSizeGranularity_ ) + , lineWidthGranularity( lineWidthGranularity_ ) + , strictLines( strictLines_ ) + , standardSampleLocations( standardSampleLocations_ ) + , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ) + , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ) + , nonCoherentAtomSize( nonCoherentAtomSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + size_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + int32_t const &, + uint32_t const &, + int32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + 
VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + float const &, + float const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( maxImageDimension1D, + maxImageDimension2D, + maxImageDimension3D, + maxImageDimensionCube, + maxImageArrayLayers, + maxTexelBufferElements, + maxUniformBufferRange, + maxStorageBufferRange, + maxPushConstantsSize, + maxMemoryAllocationCount, + maxSamplerAllocationCount, + bufferImageGranularity, + sparseAddressSpaceSize, + maxBoundDescriptorSets, + maxPerStageDescriptorSamplers, + maxPerStageDescriptorUniformBuffers, + maxPerStageDescriptorStorageBuffers, + maxPerStageDescriptorSampledImages, + maxPerStageDescriptorStorageImages, + maxPerStageDescriptorInputAttachments, + maxPerStageResources, + maxDescriptorSetSamplers, + maxDescriptorSetUniformBuffers, + maxDescriptorSetUniformBuffersDynamic, + maxDescriptorSetStorageBuffers, + maxDescriptorSetStorageBuffersDynamic, + maxDescriptorSetSampledImages, + maxDescriptorSetStorageImages, + maxDescriptorSetInputAttachments, + maxVertexInputAttributes, + maxVertexInputBindings, + maxVertexInputAttributeOffset, + maxVertexInputBindingStride, + maxVertexOutputComponents, + maxTessellationGenerationLevel, + maxTessellationPatchSize, + maxTessellationControlPerVertexInputComponents, + maxTessellationControlPerVertexOutputComponents, + maxTessellationControlPerPatchOutputComponents, + maxTessellationControlTotalOutputComponents, + maxTessellationEvaluationInputComponents, + maxTessellationEvaluationOutputComponents, + maxGeometryShaderInvocations, + maxGeometryInputComponents, + maxGeometryOutputComponents, + maxGeometryOutputVertices, + maxGeometryTotalOutputComponents, + maxFragmentInputComponents, + maxFragmentOutputAttachments, + maxFragmentDualSrcAttachments, + maxFragmentCombinedOutputResources, + maxComputeSharedMemorySize, + maxComputeWorkGroupCount, + maxComputeWorkGroupInvocations, + maxComputeWorkGroupSize, + subPixelPrecisionBits, + subTexelPrecisionBits, + mipmapPrecisionBits, + maxDrawIndexedIndexValue, + maxDrawIndirectCount, + maxSamplerLodBias, + maxSamplerAnisotropy, + maxViewports, + maxViewportDimensions, + viewportBoundsRange, + viewportSubPixelBits, + minMemoryMapAlignment, + minTexelBufferOffsetAlignment, + minUniformBufferOffsetAlignment, + minStorageBufferOffsetAlignment, + minTexelOffset, + maxTexelOffset, + minTexelGatherOffset, + maxTexelGatherOffset, + minInterpolationOffset, + maxInterpolationOffset, + subPixelInterpolationOffsetBits, + maxFramebufferWidth, + maxFramebufferHeight, + maxFramebufferLayers, + framebufferColorSampleCounts, + framebufferDepthSampleCounts, + framebufferStencilSampleCounts, + framebufferNoAttachmentsSampleCounts, + maxColorAttachments, + sampledImageColorSampleCounts, + sampledImageIntegerSampleCounts, + sampledImageDepthSampleCounts, + sampledImageStencilSampleCounts, + 
storageImageSampleCounts, + maxSampleMaskWords, + timestampComputeAndGraphics, + timestampPeriod, + maxClipDistances, + maxCullDistances, + maxCombinedClipAndCullDistances, + discreteQueuePriorities, + pointSizeRange, + lineWidthRange, + pointSizeGranularity, + lineWidthGranularity, + strictLines, + standardSampleLocations, + optimalBufferCopyOffsetAlignment, + optimalBufferCopyRowPitchAlignment, + nonCoherentAtomSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; +#else + bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && + ( maxImageDimension3D == rhs.maxImageDimension3D ) && + ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && + ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && + ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && + ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && + ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && + ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && + ( bufferImageGranularity == rhs.bufferImageGranularity ) && + ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && + ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && + ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && + ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && + ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && + ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && + ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && + ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && + ( maxPerStageResources == rhs.maxPerStageResources ) && + ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && + ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && + ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && + ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && + ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && + ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && + ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && + ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && + ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && + ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && + ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && + ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && + ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && + ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && + ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && + ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && + ( maxTessellationControlPerVertexOutputComponents == + rhs.maxTessellationControlPerVertexOutputComponents ) && + ( 
maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && + ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && + ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && + ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && + ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && + ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && + ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && + ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && + ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && + ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && + ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && + ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && + ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && + ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && + ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && + ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && + ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && + ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && + ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && + ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && + ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && + ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && + ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && + ( maxViewportDimensions == rhs.maxViewportDimensions ) && + ( viewportBoundsRange == rhs.viewportBoundsRange ) && + ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && + ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && + ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && + ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && + ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && + ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) && + ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && + ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && + ( minInterpolationOffset == rhs.minInterpolationOffset ) && + ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && + ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && + ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && + ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && + ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && + ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && + ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && + ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && + ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && + ( maxColorAttachments == rhs.maxColorAttachments ) && + ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && + ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && + ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && + ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && + ( 
storageImageSampleCounts == rhs.storageImageSampleCounts ) && + ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && + ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && + ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && + ( maxCullDistances == rhs.maxCullDistances ) && + ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && + ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) && + ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && + ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && + ( standardSampleLocations == rhs.standardSampleLocations ) && + ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && + ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && + ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); +# endif + } + + bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; + uint32_t maxPerStageDescriptorSamplers = {}; + uint32_t maxPerStageDescriptorUniformBuffers = {}; + uint32_t maxPerStageDescriptorStorageBuffers = {}; + uint32_t maxPerStageDescriptorSampledImages = {}; + uint32_t maxPerStageDescriptorStorageImages = {}; + uint32_t maxPerStageDescriptorInputAttachments = {}; + uint32_t maxPerStageResources = {}; + uint32_t maxDescriptorSetSamplers = {}; + uint32_t maxDescriptorSetUniformBuffers = {}; + uint32_t maxDescriptorSetUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetStorageBuffers = {}; + uint32_t maxDescriptorSetStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetSampledImages = {}; + uint32_t maxDescriptorSetStorageImages = {}; + uint32_t maxDescriptorSetInputAttachments = {}; + uint32_t maxVertexInputAttributes = {}; + uint32_t maxVertexInputBindings = {}; + uint32_t maxVertexInputAttributeOffset = {}; + uint32_t maxVertexInputBindingStride = {}; + uint32_t maxVertexOutputComponents = {}; + uint32_t maxTessellationGenerationLevel = {}; + uint32_t maxTessellationPatchSize = {}; + uint32_t maxTessellationControlPerVertexInputComponents = {}; + uint32_t maxTessellationControlPerVertexOutputComponents = {}; + uint32_t maxTessellationControlPerPatchOutputComponents = {}; + uint32_t maxTessellationControlTotalOutputComponents = {}; + uint32_t maxTessellationEvaluationInputComponents = {}; + uint32_t maxTessellationEvaluationOutputComponents = {}; + uint32_t maxGeometryShaderInvocations = {}; + uint32_t maxGeometryInputComponents = {}; + uint32_t maxGeometryOutputComponents = {}; + uint32_t maxGeometryOutputVertices = {}; + uint32_t maxGeometryTotalOutputComponents = {}; + uint32_t maxFragmentInputComponents = {}; + uint32_t maxFragmentOutputAttachments = {}; + uint32_t maxFragmentDualSrcAttachments = {}; + uint32_t 
maxFragmentCombinedOutputResources = {}; + uint32_t maxComputeSharedMemorySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; + uint32_t maxComputeWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; + uint32_t subPixelPrecisionBits = {}; + uint32_t subTexelPrecisionBits = {}; + uint32_t mipmapPrecisionBits = {}; + uint32_t maxDrawIndexedIndexValue = {}; + uint32_t maxDrawIndirectCount = {}; + float maxSamplerLodBias = {}; + float maxSamplerAnisotropy = {}; + uint32_t maxViewports = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; + uint32_t viewportSubPixelBits = {}; + size_t minMemoryMapAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; + int32_t minTexelOffset = {}; + uint32_t maxTexelOffset = {}; + int32_t minTexelGatherOffset = {}; + uint32_t maxTexelGatherOffset = {}; + float minInterpolationOffset = {}; + float maxInterpolationOffset = {}; + uint32_t subPixelInterpolationOffsetBits = {}; + uint32_t maxFramebufferWidth = {}; + uint32_t maxFramebufferHeight = {}; + uint32_t maxFramebufferLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; + uint32_t maxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; + uint32_t maxSampleMaskWords = {}; + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; + float timestampPeriod = {}; + uint32_t maxClipDistances = {}; + uint32_t maxCullDistances = {}; + uint32_t maxCombinedClipAndCullDistances = {}; + uint32_t discreteQueuePriorities = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; + float pointSizeGranularity = {}; + float lineWidthGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLimits is not nothrow_move_constructible!" 
); + + struct PhysicalDeviceLineRasterizationFeaturesEXT + { + using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangularLines( rectangularLines_ ) + , bresenhamLines( bresenhamLines_ ) + , smoothLines( smoothLines_ ) + , stippledRectangularLines( stippledRectangularLines_ ) + , stippledBresenhamLines( stippledBresenhamLines_ ) + , stippledSmoothLines( stippledSmoothLines_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( + PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationFeaturesEXT & + operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationFeaturesEXT & + operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + rectangularLines, + bresenhamLines, + smoothLines, + stippledRectangularLines, + stippledBresenhamLines, + stippledSmoothLines ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && + ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && + ( stippledRectangularLines == rhs.stippledRectangularLines ) && + ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && + ( stippledSmoothLines == rhs.stippledSmoothLines ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == + sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationFeaturesEXT; + }; + + struct PhysicalDeviceLineRasterizationPropertiesEXT + { + using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT + : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( + PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationPropertiesEXT & + operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationPropertiesEXT & + operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineSubPixelPrecisionBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) == + sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceLinearColorAttachmentFeaturesNV + { + using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : linearColorAttachment( linearColorAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( + PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLinearColorAttachmentFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLinearColorAttachmentFeaturesNV & + operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLinearColorAttachmentFeaturesNV & + operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & + setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT + { + linearColorAttachment = linearColorAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, linearColorAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment ); +# endif + } + + bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == + sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ), + "struct and wrapper 
have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; + }; + + struct PhysicalDeviceMaintenance3Properties + { + using NativeType = VkPhysicalDeviceMaintenance3Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMaintenance3Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPerSetDescriptors( maxPerSetDescriptors_ ) + , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance3Properties & + operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties & + operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + void * pNext = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == + sizeof( VkPhysicalDeviceMaintenance3Properties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance3Properties; + }; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + + struct PhysicalDeviceMaintenance4Features + { + using NativeType = VkPhysicalDeviceMaintenance4Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMaintenance4Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {} ) VULKAN_HPP_NOEXCEPT + : maintenance4( maintenance4_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Features & + operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Features & + operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & + setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT + { + maintenance4 = maintenance4_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == + sizeof( VkPhysicalDeviceMaintenance4Features ), + "struct and wrapper have different size!" 
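// Illustrative sketch, not part of the generated header or this patch: querying the
// Maintenance3 limits defined above through the usual pNext chain. `physicalDevice` is a
// placeholder for an existing vk::PhysicalDevice; the non-enhanced pointer overload of
// getProperties2 is assumed.
vk::PhysicalDeviceMaintenance3Properties maintenance3{};
vk::PhysicalDeviceProperties2            properties2{};
properties2.pNext = &maintenance3;              // chain the extension struct
physicalDevice.getProperties2( &properties2 );  // fills both structs in one call
vk::DeviceSize maxAllocation = maintenance3.maxMemoryAllocationSize;
uint32_t       maxPerSet     = maintenance3.maxPerSetDescriptors;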
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Features; + }; + using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; + + struct PhysicalDeviceMaintenance4Properties + { + using NativeType = VkPhysicalDeviceMaintenance4Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMaintenance4Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxBufferSize( maxBufferSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Properties & + operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Properties & + operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == + sizeof( VkPhysicalDeviceMaintenance4Properties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" 
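// Illustrative sketch, not part of the generated header or this patch: opting in to the
// maintenance4 feature at device creation by chaining the feature struct into
// vk::DeviceCreateInfo. `physicalDevice` is a placeholder, and `queueCreateInfo` stands in
// for a queue description filled in elsewhere.
vk::PhysicalDeviceMaintenance4Features maintenance4Features{};
maintenance4Features.maintenance4 = VK_TRUE;

vk::DeviceQueueCreateInfo queueCreateInfo{};  // assumed to be populated elsewhere
vk::DeviceCreateInfo      deviceCreateInfo{};
deviceCreateInfo.queueCreateInfoCount = 1;
deviceCreateInfo.pQueueCreateInfos    = &queueCreateInfo;
deviceCreateInfo.pNext                = &maintenance4Features;  // enable the feature
vk::Device device = physicalDevice.createDevice( deviceCreateInfo );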
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Properties; + }; + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + + struct PhysicalDeviceMemoryBudgetPropertiesEXT + { + using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( + std::array const & heapBudget_ = {}, + std::array const & heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : heapBudget( heapBudget_ ) + , heapUsage( heapUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( + PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryBudgetPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryBudgetPropertiesEXT & + operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryBudgetPropertiesEXT & + operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, heapBudget, heapUsage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && + ( heapUsage == rhs.heapUsage ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == + sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" 
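// Illustrative sketch, not part of the generated header or this patch: reading per-heap
// budgets through VK_EXT_memory_budget. `physicalDevice` is a placeholder vk::PhysicalDevice
// and `trackHeap` is a hypothetical callback for whatever bookkeeping the application does.
vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget{};
vk::PhysicalDeviceMemoryProperties2         memoryProperties2{};
memoryProperties2.pNext = &budget;
physicalDevice.getMemoryProperties2( &memoryProperties2 );
for ( uint32_t i = 0; i < memoryProperties2.memoryProperties.memoryHeapCount; ++i )
{
  // heapBudget[i] estimates how much of heap i this process may allocate before failures
  // become likely; heapUsage[i] is what the process currently has allocated from it.
  trackHeap( i, budget.heapBudget[i], budget.heapUsage[i] );
}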
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; + }; + + struct PhysicalDeviceMemoryPriorityFeaturesEXT + { + using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryPriority( memoryPriority_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryPriorityFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryPriorityFeaturesEXT & + operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT & + operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & + setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT + { + memoryPriority = memoryPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryPriority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == + sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
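// Illustrative sketch, not part of the generated header or this patch: once the
// memoryPriority feature above is enabled, a priority can be attached to an allocation by
// chaining vk::MemoryPriorityAllocateInfoEXT into vk::MemoryAllocateInfo. `device`,
// `allocationSize` and `memoryTypeIndex` are placeholders for values obtained elsewhere.
vk::MemoryPriorityAllocateInfoEXT priorityInfo{};
priorityInfo.priority = 1.0f;  // 0.0 = lowest, 1.0 = highest priority

vk::MemoryAllocateInfo allocateInfo{};
allocateInfo.allocationSize  = allocationSize;
allocateInfo.memoryTypeIndex = memoryTypeIndex;
allocateInfo.pNext           = &priorityInfo;
vk::DeviceMemory memory = device.allocateMemory( allocateInfo );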
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; + }; + + struct PhysicalDeviceMemoryProperties + { + using NativeType = VkPhysicalDeviceMemoryProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( + uint32_t memoryTypeCount_ = {}, + std::array const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeCount( memoryTypeCount_ ) + , memoryTypes( memoryTypes_ ) + , memoryHeapCount( memoryHeapCount_ ) + , memoryHeaps( memoryHeaps_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties & + operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == + sizeof( VkPhysicalDeviceMemoryProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" 
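// Illustrative sketch, not part of the generated header or this patch: the classic
// memory-type search over the memoryTypes array described above, as used when binding
// buffer or image memory. `typeBits` would typically come from VkMemoryRequirements.
uint32_t findMemoryType( vk::PhysicalDevice physicalDevice, uint32_t typeBits, vk::MemoryPropertyFlags required )
{
  vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties();
  for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
  {
    bool typeAllowed   = ( typeBits & ( 1u << i ) ) != 0;
    bool hasProperties = ( memoryProperties.memoryTypes[i].propertyFlags & required ) == required;
    if ( typeAllowed && hasProperties )
      return i;
  }
  return ~0u;  // no suitable memory type; callers should treat this as an error
}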
);
+
+  struct PhysicalDeviceMemoryProperties2
+  {
+    using NativeType = VkPhysicalDeviceMemoryProperties2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryProperties2 &
+      operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                  sType            = StructureType::ePhysicalDeviceMemoryProperties2;
+    void *                                               pNext            = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) ==
+                              sizeof( VkPhysicalDeviceMemoryProperties2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value,
+    "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryProperties2; + }; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + struct PhysicalDeviceMeshShaderFeaturesNV + { + using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT + : taskShader( taskShader_ ) + , meshShader( meshShader_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderFeaturesNV & + operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesNV & + operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & + setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & + setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, taskShader, meshShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && + ( meshShader == rhs.meshShader ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == + sizeof( 
VkPhysicalDeviceMeshShaderFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesNV; + }; + + struct PhysicalDeviceMeshShaderPropertiesNV + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskTotalMemorySize_ = {}, + uint32_t maxTaskOutputCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshTotalMemorySize_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) + , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) + , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) + , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) + , maxTaskOutputCount( maxTaskOutputCount_ ) + , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) + , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) + , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) + , maxMeshOutputVertices( maxMeshOutputVertices_ ) + , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) + , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) + , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) + , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesNV & + operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV & + operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxDrawMeshTasksCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskTotalMemorySize, + maxTaskOutputCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshTotalMemorySize, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshMultiviewViewCount, + meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && + ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && + ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && + ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && + ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && + ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && + ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && + ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && + ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + void * pNext = {}; + uint32_t maxDrawMeshTasksCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskTotalMemorySize = {}; + uint32_t maxTaskOutputCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshTotalMemorySize = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == + sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" 
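// Illustrative sketch, not part of the generated header or this patch: clamping a task count
// to the NV mesh-shader limit above before recording a draw. `cmd` stands in for a recording
// vk::CommandBuffer with a mesh-shader pipeline bound, `meshProperties` for a
// vk::PhysicalDeviceMeshShaderPropertiesNV filled via a properties2 query, and
// `requestedTaskCount` for an application value.
uint32_t taskCount = requestedTaskCount < meshProperties.maxDrawMeshTasksCount
                       ? requestedTaskCount
                       : meshProperties.maxDrawMeshTasksCount;  // stay within the reported limit
cmd.drawMeshTasksNV( taskCount, 0 /* firstTask */ );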
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesNV; + }; + + struct PhysicalDeviceMultiDrawFeaturesEXT + { + using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {} ) VULKAN_HPP_NOEXCEPT + : multiDraw( multiDraw_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiDrawFeaturesEXT & + operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawFeaturesEXT & + operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & + setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT + { + multiDraw = multiDraw_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw ); +# endif + } + + bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == + sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiDrawFeaturesEXT; + }; + + struct PhysicalDeviceMultiDrawPropertiesEXT + { + using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {} ) VULKAN_HPP_NOEXCEPT + : maxMultiDrawCount( maxMultiDrawCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiDrawPropertiesEXT & + operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawPropertiesEXT & + operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxMultiDrawCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount ); +# endif + } + + bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; + void * pNext = {}; + uint32_t maxMultiDrawCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == + sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" 
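// Illustrative sketch, not part of the generated header or this patch: recording a
// VK_EXT_multi_draw call once the multiDraw feature is enabled. Each vk::MultiDrawInfoEXT
// carries a firstVertex/vertexCount pair, and the number of entries must not exceed the
// maxMultiDrawCount limit above. `cmd` is a placeholder recording vk::CommandBuffer, and the
// ArrayProxy-based drawMultiEXT wrapper for vkCmdDrawMultiEXT is assumed.
std::array<vk::MultiDrawInfoEXT, 2> draws = { vk::MultiDrawInfoEXT{ 0, 3 }, vk::MultiDrawInfoEXT{ 3, 3 } };
cmd.drawMultiEXT( draws, 1 /* instanceCount */, 0 /* firstInstance */, sizeof( vk::MultiDrawInfoEXT ) );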
); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiDrawPropertiesEXT; + }; + + struct PhysicalDeviceMultiviewFeatures + { + using NativeType = VkPhysicalDeviceMultiviewFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT + : multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewFeatures & + operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ); +# endif + } + + bool 
operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == + sizeof( VkPhysicalDeviceMultiviewFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewFeatures; + }; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT + : perViewPositionAllComponents( perViewPositionAllComponents_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewPositionAllComponents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
perViewPositionAllComponents == rhs.perViewPositionAllComponents ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == + sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, + "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + }; + + struct PhysicalDeviceMultiviewProperties + { + using NativeType = VkPhysicalDeviceMultiviewProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMultiviewProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : maxMultiviewViewCount( maxMultiviewViewCount_ ) + , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewProperties & + operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); +# endif + } + + bool operator!=( 
PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + void * pNext = {}; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == + sizeof( VkPhysicalDeviceMultiviewProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewProperties; + }; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + + struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE + { + using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {} ) VULKAN_HPP_NOEXCEPT + : mutableDescriptorType( mutableDescriptorType_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & + setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorType = mutableDescriptorType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & ) const = 
default; +#else + bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); +# endif + } + + bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) == + sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + }; + + struct PhysicalDevicePCIBusInfoPropertiesEXT + { + using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, + uint32_t pciBus_ = {}, + uint32_t pciDevice_ = {}, + uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT + : pciDomain( pciDomain_ ) + , pciBus( pciBus_ ) + , pciDevice( pciDevice_ ) + , pciFunction( pciFunction_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePCIBusInfoPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePCIBusInfoPropertiesEXT & + operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePCIBusInfoPropertiesEXT & + operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() 
== rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && + ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction ); +# endif + } + + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + void * pNext = {}; + uint32_t pciDomain = {}; + uint32_t pciBus = {}; + uint32_t pciDevice = {}; + uint32_t pciFunction = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == + sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + }; + + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT + { + using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : pageableDeviceLocalMemory( pageableDeviceLocalMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( + VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT + { + pageableDeviceLocalMemory = pageableDeviceLocalMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto 
+# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pageableDeviceLocalMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory ); +# endif + } + + bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) == + sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + }; + + struct PhysicalDevicePerformanceQueryFeaturesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT + : performanceCounterQueryPools( performanceCounterQueryPools_ ) + , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( + PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryFeaturesKHR & + operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR & + operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + 
performanceCounterQueryPools = performanceCounterQueryPools_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools(
+      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
+    }
+
+    explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
+             ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
+#  endif
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) ==
+                              sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+    "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + }; + + struct PhysicalDevicePerformanceQueryPropertiesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT + : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( + PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryPropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryPropertiesKHR & + operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR & + operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allowCommandBufferQueryCopies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) == + sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + }; + + struct PhysicalDevicePipelineCreationCacheControlFeatures + { + using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineCreationCacheControl( pipelineCreationCacheControl_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( + PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeatures( + VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineCreationCacheControlFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineCreationCacheControlFeatures & + operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeatures & + operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineCreationCacheControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); +# endif + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + }; + 
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) == + sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineCreationCacheControlFeatures; + }; + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineExecutableInfo( pipelineExecutableInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + { + pipelineExecutableInfo = pipelineExecutableInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineExecutableInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); +# endif + } + + bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == + sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, + "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + }; + + struct PhysicalDevicePointClippingProperties + { + using NativeType = VkPhysicalDevicePointClippingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePointClippingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT + : pointClippingBehavior( pointClippingBehavior_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePointClippingProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePointClippingProperties & + operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties & + operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pointClippingBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); +# endif + } + + bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == + sizeof( VkPhysicalDevicePointClippingProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePointClippingProperties; + }; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetFeaturesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {} ) VULKAN_HPP_NOEXCEPT + : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ) + , events( events_ ) + , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ) + , imageViewFormatSwizzle( imageViewFormatSwizzle_ ) + , imageView2DOn3DImage( imageView2DOn3DImage_ ) + , multisampleArrayImage( multisampleArrayImage_ ) + , mutableComparisonSamplers( mutableComparisonSamplers_ ) + , pointPolygons( pointPolygons_ ) + , samplerMipLodBias( samplerMipLodBias_ ) + , separateStencilMaskRef( separateStencilMaskRef_ ) + , shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ) + , tessellationIsolines( tessellationIsolines_ ) + , tessellationPointMode( tessellationPointMode_ ) + , triangleFans( triangleFans_ ) + , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( + PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePortabilitySubsetFeaturesKHR & + operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetFeaturesKHR & + operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT + { + constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT + { + events = events_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatReinterpretation = imageViewFormatReinterpretation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatSwizzle = imageViewFormatSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT + { + imageView2DOn3DImage = imageView2DOn3DImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT + { + multisampleArrayImage = multisampleArrayImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT + { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT + { + pointPolygons = pointPolygons_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + samplerMipLodBias = samplerMipLodBias_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT + { + separateStencilMaskRef = separateStencilMaskRef_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampleRateInterpolationFunctions = 
shaderSampleRateInterpolationFunctions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT + { + tessellationIsolines = tessellationIsolines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT + { + tessellationPointMode = tessellationPointMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT + { + triangleFans = triangleFans_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + constantAlphaColorBlendFactors, + events, + imageViewFormatReinterpretation, + imageViewFormatSwizzle, + imageView2DOn3DImage, + multisampleArrayImage, + mutableComparisonSamplers, + pointPolygons, + samplerMipLodBias, + separateStencilMaskRef, + shaderSampleRateInterpolationFunctions, + tessellationIsolines, + tessellationPointMode, + triangleFans, + vertexAttributeAccessBeyondStride ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && ( events == rhs.events ) && + ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) && + ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && + ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) && + ( multisampleArrayImage == rhs.multisampleArrayImage ) && + ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && ( pointPolygons == rhs.pointPolygons ) && + ( samplerMipLodBias == rhs.samplerMipLodBias ) && + ( separateStencilMaskRef == rhs.separateStencilMaskRef ) && + ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && + ( tessellationIsolines == rhs.tessellationIsolines ) && + ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) && + ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); +# endif + } + + bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; + VULKAN_HPP_NAMESPACE::Bool32 events = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == + sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetPropertiesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( + uint32_t minVertexInputBindingStrideAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( + PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetPropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePortabilitySubsetPropertiesKHR & + operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR & + operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & + setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_; 
+      return *this;
+    }
+#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
+    }
+
+    explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
+    }
+
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minVertexInputBindingStrideAlignment );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
+#  else
+    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#    if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#    else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
+#    endif
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+    void * pNext = {};
+    uint32_t minVertexInputBindingStrideAlignment = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) ==
+                              sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+    "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDevicePresentIdFeaturesKHR + { + using NativeType = VkPhysicalDevicePresentIdFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePresentIdFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {} ) VULKAN_HPP_NOEXCEPT + : presentId( presentId_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePresentIdFeaturesKHR & + operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentIdFeaturesKHR & + operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & + setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT + { + presentId = presentId_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ); +# endif + } + + bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentId = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == + sizeof( VkPhysicalDevicePresentIdFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePresentIdFeaturesKHR; + }; + + struct PhysicalDevicePresentWaitFeaturesKHR + { + using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {} ) VULKAN_HPP_NOEXCEPT + : presentWait( presentWait_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePresentWaitFeaturesKHR & + operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentWaitFeaturesKHR & + operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & + setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT + { + presentWait = presentWait_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentWait ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait ); +# endif + } + + bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentWait = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) == + sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDevicePresentWaitFeaturesKHR; + }; + + struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT + { + using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveTopologyListRestart( primitiveTopologyListRestart_ ) + , primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( + VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT + { + primitiveTopologyListRestart = primitiveTopologyListRestart_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) + VULKAN_HPP_NOEXCEPT + { + primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) && + ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart ); +# endif + } + + bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) == + sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value, + "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + }; + + struct PhysicalDevicePrivateDataFeatures + { + using NativeType = VkPhysicalDevicePrivateDataFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePrivateDataFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {} ) VULKAN_HPP_NOEXCEPT + : privateData( privateData_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrivateDataFeatures( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePrivateDataFeatures & + operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & + setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT + { + privateData = privateData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, privateData ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData ); +# endif + } + + bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == + sizeof( VkPhysicalDevicePrivateDataFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePrivateDataFeatures; + }; + using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; + + struct PhysicalDeviceSparseProperties + { + using NativeType = VkPhysicalDeviceSparseProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT + : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ) + , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ) + , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ) + , residencyAlignedMipSize( residencyAlignedMipSize_ ) + , residencyNonResidentStrict( residencyNonResidentStrict_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSparseProperties & + operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( residencyStandard2DBlockShape, + residencyStandard2DMultisampleBlockShape, + residencyStandard3DBlockShape, + residencyAlignedMipSize, + 
residencyNonResidentStrict ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && + ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && + ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && + ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && + ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); +# endif + } + + bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == + sizeof( VkPhysicalDeviceSparseProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" ); + + struct PhysicalDeviceProperties + { + using NativeType = VkPhysicalDeviceProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( + uint32_t apiVersion_ = {}, + uint32_t driverVersion_ = {}, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, + std::array const & deviceName_ = {}, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : apiVersion( apiVersion_ ) + , driverVersion( driverVersion_ ) + , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , deviceType( deviceType_ ) + , deviceName( deviceName_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + , limits( limits_ ) + , sparseProperties( sparseProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( apiVersion, + driverVersion, + vendorID, + deviceID, + deviceType, + deviceName, + pipelineCacheUUID, + limits, + sparseProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && + ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) && + ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); +# endif + } + + bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == + sizeof( VkPhysicalDeviceProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceProperties is not nothrow_move_constructible!" 
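  // --- [editorial usage sketch - illustration only, not part of this patch or the generated header] ---
  // A typical way the PhysicalDeviceProperties wrapper above is consumed; assumes
  // <vulkan/vulkan.hpp> plus <cstdio>, and a valid vk::PhysicalDevice obtained elsewhere.
  inline void printBasicDeviceInfo( vk::PhysicalDevice gpu )
  {
    vk::PhysicalDeviceProperties const props = gpu.getProperties();
    // deviceName is an ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>; data() yields a C string
    std::printf( "device: %s\n", props.deviceName.data() );
    std::printf( "api:    %u.%u\n",
                 VK_API_VERSION_MAJOR( props.apiVersion ),
                 VK_API_VERSION_MINOR( props.apiVersion ) );
    std::printf( "max 2D image dimension: %u\n", props.limits.maxImageDimension2D );
  }
  // --- [end editorial sketch] ---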
);

+  struct PhysicalDeviceProperties2
+  {
+    using NativeType = VkPhysicalDeviceProperties2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, properties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType      = StructureType::ePhysicalDeviceProperties2;
+    void *                                         pNext      = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) ==
+                              sizeof( VkPhysicalDeviceProperties2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value,
+                            "PhysicalDeviceProperties2 is not nothrow_move_constructible!"
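  // --- [editorial usage sketch - illustration only, not part of this patch or the generated header] ---
  // PhysicalDeviceProperties2 is the extensible (sType/pNext) variant of PhysicalDeviceProperties.
  // With vulkan.hpp's StructureChain helpers, core and extension properties can be queried in one
  // call; PhysicalDevicePushDescriptorPropertiesKHR (defined further below) is used here purely as
  // an example extension struct, and the result is only meaningful when VK_KHR_push_descriptor is
  // supported by the device.
  inline uint32_t queryMaxPushDescriptors( vk::PhysicalDevice gpu )
  {
    auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
                                    vk::PhysicalDevicePushDescriptorPropertiesKHR>();
    // getProperties2 wires up the pNext chain and fills both structs in a single query
    return chain.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>().maxPushDescriptors;
  }
  // --- [end editorial sketch] ---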
); + + template <> + struct CppType + { + using Type = PhysicalDeviceProperties2; + }; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct PhysicalDeviceProtectedMemoryFeatures + { + using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProtectedMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : protectedMemory( protectedMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProtectedMemoryFeatures & + operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryFeatures & + operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); +# endif + } + + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == + sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceProtectedMemoryFeatures; + }; + + struct PhysicalDeviceProtectedMemoryProperties + { + using NativeType = VkPhysicalDeviceProtectedMemoryProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProtectedMemoryProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT + : protectedNoFault( protectedNoFault_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProtectedMemoryProperties & + operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryProperties & + operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedNoFault ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault ); +# endif + } + + bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == + sizeof( VkPhysicalDeviceProtectedMemoryProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" 
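  // --- [editorial usage sketch - illustration only, not part of this patch or the generated header] ---
  // Feature structs such as PhysicalDeviceProtectedMemoryFeatures expose chainable setters
  // (setPNext / setProtectedMemory above), so they can be filled builder-style; sType is already
  // initialized by the default constructor.
  inline vk::PhysicalDeviceProtectedMemoryFeatures makeProtectedMemoryRequest()
  {
    return vk::PhysicalDeviceProtectedMemoryFeatures{}.setProtectedMemory( VK_TRUE );
  }
  // --- [end editorial sketch] ---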
); + + template <> + struct CppType + { + using Type = PhysicalDeviceProtectedMemoryProperties; + }; + + struct PhysicalDeviceProvokingVertexFeaturesEXT + { + using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT + : provokingVertexLast( provokingVertexLast_ ) + , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( + PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProvokingVertexFeaturesEXT & + operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexFeaturesEXT & + operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & + setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexLast = provokingVertexLast_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex( + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) && + ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex ); +# endif + } + + bool operator!=( 
PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == + sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceProvokingVertexFeaturesEXT; + }; + + struct PhysicalDeviceProvokingVertexPropertiesEXT + { + using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT + : provokingVertexModePerPipeline( provokingVertexModePerPipeline_ ) + , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( + PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProvokingVertexPropertiesEXT & + operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexPropertiesEXT & + operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) && + ( transformFeedbackPreservesTriangleFanProvokingVertex == + rhs.transformFeedbackPreservesTriangleFanProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == + sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceProvokingVertexPropertiesEXT; + }; + + struct PhysicalDevicePushDescriptorPropertiesKHR + { + using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPushDescriptors( maxPushDescriptors_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( + PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDevicePushDescriptorPropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePushDescriptorPropertiesKHR & + operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePushDescriptorPropertiesKHR & + operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPushDescriptors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors ); +# endif + } + + bool operator!=( 
PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + void * pNext = {}; + uint32_t maxPushDescriptors = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == + sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDevicePushDescriptorPropertiesKHR; + }; + + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT + { + using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {} ) VULKAN_HPP_NOEXCEPT + : formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( + PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT & + operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT & + operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler( + VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT + { + formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler ); +# endif + } + + bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == + sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + }; + + struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM + { + using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ ) + , rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ ) + , rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( + VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & operator =( + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & + operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( + &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & + setRasterizationOrderColorAttachmentAccess( + VULKAN_HPP_NAMESPACE::Bool32 
rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & + setRasterizationOrderDepthAttachmentAccess( + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & + setRasterizationOrderStencilAttachmentAccess( + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + rasterizationOrderColorAttachmentAccess, + rasterizationOrderDepthAttachmentAccess, + rasterizationOrderStencilAttachmentAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) && + ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) && + ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess ); +# endif + } + + bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {}; + }; + VULKAN_HPP_STATIC_ASSERT( + sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ) == + sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>::value, + "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM is not nothrow_move_constructible!" 
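  // --- [editorial note + usage sketch - illustration only, not part of this patch or the generated header] ---
  // The VULKAN_HPP_STATIC_ASSERTs above (same size as the C struct, standard layout, nothrow move
  // constructible) are what make the reinterpret_cast-based conversions between the C++ wrappers
  // and the raw Vk* structs safe in practice. The explicit conversion operators expose that
  // directly, without copying:
  inline void passToCInterface( vk::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & wrapped )
  {
    VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & native =
      static_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &>( wrapped );
    (void)native;  // &native can now be handed to a C-style pNext chain or a plain C API call
  }
  // --- [end editorial sketch] ---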
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + }; + + struct PhysicalDeviceRayQueryFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {} ) VULKAN_HPP_NOEXCEPT + : rayQuery( rayQuery_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayQueryFeaturesKHR & + operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & + setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + { + rayQuery = rayQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery ); +# endif + } + + bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" 
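  // --- [editorial usage sketch - illustration only, not part of this patch or the generated header] ---
  // Feature structs like PhysicalDeviceRayQueryFeaturesKHR are normally queried through a
  // features2 chain first, and later chained into DeviceCreateInfo::pNext to actually enable the
  // feature. A query sketch, assuming the device advertises VK_KHR_ray_query:
  inline bool supportsRayQuery( vk::PhysicalDevice gpu )
  {
    auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
                                  vk::PhysicalDeviceRayQueryFeaturesKHR>();
    return chain.get<vk::PhysicalDeviceRayQueryFeaturesKHR>().rayQuery == VK_TRUE;
  }
  // --- [end editorial sketch] ---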
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayQueryFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {} ) VULKAN_HPP_NOEXCEPT + : rayTracingMotionBlur( rayTracingMotionBlur_ ) + , rayTracingMotionBlurPipelineTraceRaysIndirect( rayTracingMotionBlurPipelineTraceRaysIndirect_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( + PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMotionBlurFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & + operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & + operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlur = rayTracingMotionBlur_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlurPipelineTraceRaysIndirect( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) && + ( 
rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV; + }; + + struct PhysicalDeviceRayTracingPipelineFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {} ) VULKAN_HPP_NOEXCEPT + : rayTracingPipeline( rayTracingPipeline_ ) + , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ) + , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) + , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ) + , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( + PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelineFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelineFeaturesKHR & + operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR & + operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT + { + 
rayTracingPipeline = rayTracingPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineShaderGroupHandleCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + { + rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + rayTracingPipeline, + rayTracingPipelineShaderGroupHandleCaptureReplay, + rayTracingPipelineShaderGroupHandleCaptureReplayMixed, + rayTracingPipelineTraceRaysIndirect, + rayTraversalPrimitiveCulling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplay == + rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == + rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) && + ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) && + ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPipelinePropertiesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRayRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {}, + uint32_t maxRayDispatchInvocationCount_ = {}, + uint32_t shaderGroupHandleAlignment_ = {}, + uint32_t maxRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRayRecursionDepth( maxRayRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) + , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ) + , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ) + , maxRayHitAttributeSize( maxRayHitAttributeSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( + PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelinePropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelinePropertiesKHR & + operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR & + operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderGroupHandleSize, + maxRayRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + shaderGroupHandleCaptureReplaySize, + maxRayDispatchInvocationCount, + shaderGroupHandleAlignment, + maxRayHitAttributeSize ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && + ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && + ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && + ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) && + ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && + ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) && + ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRayRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; + uint32_t maxRayDispatchInvocationCount = {}; + uint32_t shaderGroupHandleAlignment = {}; + uint32_t maxRayHitAttributeSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" 
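  // --- [editorial usage sketch - illustration only, not part of this patch or the generated header;
  //      alignUp is a hypothetical local helper] ---
  // The limits in PhysicalDeviceRayTracingPipelinePropertiesKHR drive shader-binding-table layout:
  // each record stride must be a multiple of shaderGroupHandleAlignment (and at least
  // shaderGroupHandleSize), while each SBT region base must honor shaderGroupBaseAlignment.
  inline uint32_t alignUp( uint32_t value, uint32_t alignment )
  {
    // valid because Vulkan alignment limits are powers of two
    return ( value + alignment - 1 ) & ~( alignment - 1 );
  }

  inline uint32_t sbtRecordStride( vk::PhysicalDeviceRayTracingPipelinePropertiesKHR const & rt )
  {
    return alignUp( rt.shaderGroupHandleSize, rt.shaderGroupHandleAlignment );
  }
  // --- [end editorial sketch] ---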
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; + }; + + struct PhysicalDeviceRayTracingPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRayTracingPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxTriangleCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxTriangleCount( maxTriangleCount_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPropertiesNV & + operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV & + operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderGroupHandleSize, + maxRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + maxGeometryCount, + maxInstanceCount, + maxTriangleCount, + maxDescriptorSetAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && + ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && + ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && + ( maxTriangleCount == rhs.maxTriangleCount ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); +# 
endif + } + + bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxTriangleCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == + sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPropertiesNV; + }; + + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV + { + using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT + : representativeFragmentTest( representativeFragmentTest_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTest = representativeFragmentTest_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTest ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTest == rhs.representativeFragmentTest ); +# endif + } + + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == + sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + }; + + struct PhysicalDeviceRobustness2FeaturesEXT + { + using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess2( robustBufferAccess2_ ) + , robustImageAccess2( robustImageAccess2_ ) + , nullDescriptor( nullDescriptor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2FeaturesEXT & + operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT & + operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & + setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess2 = robustBufferAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceRobustness2FeaturesEXT & + setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess2 = robustImageAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & + setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + nullDescriptor = nullDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) && + ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == + sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2FeaturesEXT; + }; + + struct PhysicalDeviceRobustness2PropertiesEXT + { + using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ) + , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2PropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2PropertiesEXT & + operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT & + operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) && + ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == + sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2PropertiesEXT; + }; + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const & sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationSampleCounts( sampleLocationSampleCounts_ ) + , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ) + , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) + , variableSampleLocations( variableSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( + PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSampleLocationsPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSampleLocationsPropertiesEXT & + operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT & + operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + sampleLocationSampleCounts, + maxSampleLocationGridSize, + sampleLocationCoordinateRange, + sampleLocationSubPixelBits, + variableSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && + ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && + ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && + ( variableSampleLocations == 
rhs.variableSampleLocations ); +# endif + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; + uint32_t sampleLocationSubPixelBits = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == + sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSampleLocationsPropertiesEXT; + }; + + struct PhysicalDeviceSamplerFilterMinmaxProperties + { + using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT + : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( + PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerFilterMinmaxProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSamplerFilterMinmaxProperties & + operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerFilterMinmaxProperties & + operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); +# endif + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) == + sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerFilterMinmaxProperties; + }; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT + : samplerYcbcrConversion( samplerYcbcrConversion_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( + PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerYcbcrConversionFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures & + operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, samplerYcbcrConversion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); +# endif + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) == + sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + }; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + + struct PhysicalDeviceScalarBlockLayoutFeatures + { + using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) + VULKAN_HPP_NOEXCEPT : scalarBlockLayout( scalarBlockLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceScalarBlockLayoutFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceScalarBlockLayoutFeatures & + operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures & + operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + 
scalarBlockLayout = scalarBlockLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, scalarBlockLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); +# endif + } + + bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == + sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceScalarBlockLayoutFeatures; + }; + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures + { + using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, separateDepthStencilLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); +# endif + } + + bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == + sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + }; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ ) + , shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ ) + , shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ ) + , shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ ) + , shaderBufferFloat64AtomicMinMax( shaderBufferFloat64AtomicMinMax_ ) + , shaderSharedFloat16Atomics( shaderSharedFloat16Atomics_ ) + , shaderSharedFloat16AtomicAdd( shaderSharedFloat16AtomicAdd_ ) + , shaderSharedFloat16AtomicMinMax( shaderSharedFloat16AtomicMinMax_ ) + , shaderSharedFloat32AtomicMinMax( shaderSharedFloat32AtomicMinMax_ ) + , shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ ) + , shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ ) + , sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( + PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + operator=( 
VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat32AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat64AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat32AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat64AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderImageFloat32AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & 
setSparseImageFloat32AtomicMinMax( + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderBufferFloat16Atomics, + shaderBufferFloat16AtomicAdd, + shaderBufferFloat16AtomicMinMax, + shaderBufferFloat32AtomicMinMax, + shaderBufferFloat64AtomicMinMax, + shaderSharedFloat16Atomics, + shaderSharedFloat16AtomicAdd, + shaderSharedFloat16AtomicMinMax, + shaderSharedFloat32AtomicMinMax, + shaderSharedFloat64AtomicMinMax, + shaderImageFloat32AtomicMinMax, + sparseImageFloat32AtomicMinMax ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) && + ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) && + ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) && + ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) && + ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && + ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) && + ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) && + ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) && + ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) && + ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax ) && + ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax ) && + ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + }; + + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ) + , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ) + , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ) + , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ) + , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ) + , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ) + , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ) + , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ) + , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ) + , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ) + , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ) + , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( + PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloatFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & + operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void 
* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderBufferFloat32Atomics, + shaderBufferFloat32AtomicAdd, + shaderBufferFloat64Atomics, + shaderBufferFloat64AtomicAdd, + shaderSharedFloat32Atomics, + shaderSharedFloat32AtomicAdd, + shaderSharedFloat64Atomics, + shaderSharedFloat64AtomicAdd, + shaderImageFloat32Atomics, + shaderImageFloat32AtomicAdd, + sparseImageFloat32Atomics, + sparseImageFloat32AtomicAdd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) && + ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && + ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) && + ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && + ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) && + ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && + ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) && + ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && + ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) && + ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && + ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) && + ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderAtomicInt64Features
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicInt64Features(
+          *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicInt64Features &
+      operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicInt64Features &
+      operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
+      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
+      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
+             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == + sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicInt64Features; + }; + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + + struct PhysicalDeviceShaderClockFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSubgroupClock( shaderSubgroupClock_ ) + , shaderDeviceClock( shaderDeviceClock_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderClockFeaturesKHR & + operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderClockFeaturesKHR & + operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & + setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupClock = shaderSubgroupClock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & + setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderDeviceClock = shaderDeviceClock_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) && + ( shaderDeviceClock == rhs.shaderDeviceClock ); +# endif + } + + bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderClockFeaturesKHR; + }; + + struct PhysicalDeviceShaderCoreProperties2AMD + { + using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, + uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderCoreFeatures( shaderCoreFeatures_ ) + , activeComputeUnitCount( activeComputeUnitCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreProperties2AMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderCoreProperties2AMD & + operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD & + operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) && + ( activeComputeUnitCount == rhs.activeComputeUnitCount ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; + uint32_t activeComputeUnitCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == + sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreProperties2AMD; + }; + + struct PhysicalDeviceShaderCorePropertiesAMD + { + using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderEngineCount( shaderEngineCount_ ) + , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) + , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) + , simdPerComputeUnit( simdPerComputeUnit_ ) + , wavefrontsPerSimd( wavefrontsPerSimd_ ) + , wavefrontSize( wavefrontSize_ ) + , sgprsPerSimd( sgprsPerSimd_ ) + , minSgprAllocation( minSgprAllocation_ ) + , maxSgprAllocation( maxSgprAllocation_ ) + , sgprAllocationGranularity( sgprAllocationGranularity_ ) + , vgprsPerSimd( vgprsPerSimd_ ) + , minVgprAllocation( minVgprAllocation_ ) + , maxVgprAllocation( maxVgprAllocation_ ) + , vgprAllocationGranularity( vgprAllocationGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderCorePropertiesAMD & + operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD & + operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT 
+ { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderEngineCount, + shaderArraysPerEngineCount, + computeUnitsPerShaderArray, + simdPerComputeUnit, + wavefrontsPerSimd, + wavefrontSize, + sgprsPerSimd, + minSgprAllocation, + maxSgprAllocation, + sgprAllocationGranularity, + vgprsPerSimd, + minVgprAllocation, + maxVgprAllocation, + vgprAllocationGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && + ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && + ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && + ( wavefrontSize == rhs.wavefrontSize ) && ( sgprsPerSimd == rhs.sgprsPerSimd ) && + ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == + sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + + struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures + { + using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( + PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures( + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setShaderDemoteToHelperInvocation( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDemoteToHelperInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + void * pNext = {}; + 
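+    // Single feature bit: reports whether shaders may demote an invocation to a helper invocation
+    // (the demote-to-helper-invocation SPIR-V instruction) instead of using discard.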
VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) == + sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, + "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + }; + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + struct PhysicalDeviceShaderDrawParametersFeatures + { + using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDrawParameters( shaderDrawParameters_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( + PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDrawParametersFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderDrawParametersFeatures & + operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures & + operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDrawParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == + sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + struct PhysicalDeviceShaderFloat16Int8Features + { + using NativeType = VkPhysicalDeviceShaderFloat16Int8Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderFloat16Int8Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloat16Int8Features( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderFloat16Int8Features & + operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features & + operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & + setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & + setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloat16, shaderInt8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == + sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloat16Int8Features; + }; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderImageInt64Atomics( shaderImageInt64Atomics_ ) + , sparseImageInt64Atomics( sparseImageInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
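+    // Editorial note: the setters below return *this, so a feature struct can be filled fluently
+    // before device creation. A minimal query sketch, assuming the default `vk` namespace, an
+    // initialized dispatcher, and a VULKAN_HPP_NAMESPACE::PhysicalDevice named `gpu` whose driver
+    // advertises VK_EXT_shader_image_atomic_int64 (names here are illustrative, not from this patch):
+    //
+    //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
+    //                                 vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>();
+    //   const auto & imageAtomics = chain.get<vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>();
+    //   // imageAtomics.shaderImageInt64Atomics / sparseImageInt64Atomics now hold the driver's answer.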
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageInt64Atomics = shaderImageInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageInt64Atomics = sparseImageInt64Atomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) && + ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics ); +# endif + } + + bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + }; + + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT : imageFootprint( imageFootprint_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( + PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageFootprintFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV & + operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & + setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + { + imageFootprint = imageFootprint_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageFootprint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); +# endif + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + }; + + struct PhysicalDeviceShaderIntegerDotProductFeatures + { + using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderIntegerDotProduct( shaderIntegerDotProduct_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( + PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerDotProductFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerDotProductFeatures & + operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductFeatures & + operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & + setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerDotProduct = shaderIntegerDotProduct_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderIntegerDotProduct ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) == + sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerDotProductFeatures; + }; + using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; + + struct PhysicalDeviceShaderIntegerDotProductProperties + { + using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {} ) + VULKAN_HPP_NOEXCEPT + : integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) + , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) + , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) + , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) + , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) + , integerDotProduct4x8BitPackedMixedSignednessAccelerated( + integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) + , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) + , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) + , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) + , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) + , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) + , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) + , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ) + , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) + , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitSignedAccelerated( + integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating16BitSignedAccelerated( + integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) + , 
integerDotProductAccumulatingSaturating32BitSignedAccelerated( + integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating64BitSignedAccelerated( + integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( + PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerDotProductProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerDotProductProperties & + operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductProperties & + operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct8BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct8BitUnsignedAccelerated = integerDotProduct8BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & setIntegerDotProduct8BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct8BitSignedAccelerated = integerDotProduct8BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct8BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct8BitMixedSignednessAccelerated = integerDotProduct8BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct4x8BitPackedUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct4x8BitPackedUnsignedAccelerated = integerDotProduct4x8BitPackedUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct4x8BitPackedSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct4x8BitPackedSignedAccelerated = integerDotProduct4x8BitPackedSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + 
setIntegerDotProduct4x8BitPackedMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct4x8BitPackedMixedSignednessAccelerated = + integerDotProduct4x8BitPackedMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct16BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct16BitUnsignedAccelerated = integerDotProduct16BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct16BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct16BitSignedAccelerated = integerDotProduct16BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct16BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct16BitMixedSignednessAccelerated = integerDotProduct16BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct32BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct32BitUnsignedAccelerated = integerDotProduct32BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct32BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct32BitSignedAccelerated = integerDotProduct32BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct32BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct32BitMixedSignednessAccelerated = integerDotProduct32BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct64BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct64BitUnsignedAccelerated = integerDotProduct64BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct64BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct64BitSignedAccelerated = integerDotProduct64BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProduct64BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProduct64BitMixedSignednessAccelerated = integerDotProduct64BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating8BitUnsignedAccelerated( + 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating8BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating8BitSignedAccelerated = + integerDotProductAccumulatingSaturating8BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating16BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating16BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating16BitSignedAccelerated = + integerDotProductAccumulatingSaturating16BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( + 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating32BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating32BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating32BitSignedAccelerated = + integerDotProductAccumulatingSaturating32BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating64BitUnsignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating64BitSignedAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating64BitSignedAccelerated = + integerDotProductAccumulatingSaturating64BitSignedAccelerated_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductProperties & + setIntegerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) + VULKAN_HPP_NOEXCEPT + { + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + 
integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == + rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == + rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == + rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == + rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated 
== rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == + rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 
integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) == + sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
+  {
+    using Type = PhysicalDeviceShaderIntegerDotProductProperties;
+  };
+  using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
+
+  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
+  {
+    using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
+      VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
+      PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
+      VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
+          *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
+      operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
+      operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
+      setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
+      setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderIntegerFunctions2 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderIntegerFunctions2 = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) ==
+                              sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
+    "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
+  {
+    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} )
+      VULKAN_HPP_NOEXCEPT : shaderSMBuiltins( shaderSMBuiltins_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(
+      PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSMBuiltinsFeaturesNV(
+          *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV &
+      operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV &
+      operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV &
+      setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSMBuiltins = shaderSMBuiltins_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSMBuiltins );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+    void *                              pNext            = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderSMBuiltins = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) ==
+                              sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
+    "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_    = {},
+                                                  uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderSMCount( shaderSMCount_ )
+      , shaderWarpsPerSM( shaderWarpsPerSM_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(
+      PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSMBuiltinsPropertiesNV(
+          *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV &
+      operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV &
+      operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) &&
+             ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+    void *                              pNext            = {};
+    uint32_t                            shaderSMCount    = {};
+    uint32_t                            shaderWarpsPerSM = {};
+
}; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == + sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + }; + + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures + { + using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupExtendedTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( 
shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == + sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + }; + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( + VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( + &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) + VULKAN_HPP_NOEXCEPT + { + shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR 
&() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupUniformControlFlow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = + StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value, + "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + }; + + struct PhysicalDeviceShaderTerminateInvocationFeatures + { + using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderTerminateInvocation( shaderTerminateInvocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( + PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTerminateInvocationFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderTerminateInvocationFeatures & + operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTerminateInvocationFeatures & + operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderTerminateInvocation = shaderTerminateInvocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderTerminateInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) == + sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTerminateInvocationFeatures; + }; + using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + + struct PhysicalDeviceShadingRateImageFeaturesNV + { + using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateImage( shadingRateImage_ ) + , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( + PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImageFeaturesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShadingRateImageFeaturesNV & + operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImageFeaturesNV & + operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImage = shadingRateImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default; +#else + bool operator==( 
PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) && + ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); +# endif + } + + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == + sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImageFeaturesNV; + }; + + struct PhysicalDeviceShadingRateImagePropertiesNV + { + using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, + uint32_t shadingRatePaletteSize_ = {}, + uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateTexelSize( shadingRateTexelSize_ ) + , shadingRatePaletteSize( shadingRatePaletteSize_ ) + , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( + PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImagePropertiesNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShadingRateImagePropertiesNV & + operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShadingRateImagePropertiesNV & + operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) && + ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && + ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); +# endif + } + + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; + uint32_t shadingRatePaletteSize = {}; + uint32_t shadingRateMaxCoarseSamples = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == + sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceShadingRateImagePropertiesNV; + }; + + struct PhysicalDeviceSparseImageFormatInfo2 + { + using NativeType = VkPhysicalDeviceSparseImageFormatInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSparseImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , type( type_ ) + , samples( samples_ ) + , usage( usage_ ) + , tiling( tiling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSparseImageFormatInfo2 & + operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseImageFormatInfo2 & + operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) 
VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & + setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, samples, usage, tiling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && + ( samples == rhs.samples ) && ( usage == rhs.usage ) && ( tiling == rhs.tiling ); +# endif + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == + sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceSparseImageFormatInfo2; + }; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + struct PhysicalDeviceSubgroupProperties + { + using NativeType = VkPhysicalDeviceSubgroupProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT + : subgroupSize( subgroupSize_ ) + , supportedStages( supportedStages_ ) + , supportedOperations( supportedOperations_ ) + , quadOperationsInAllStages( quadOperationsInAllStages_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupProperties & + operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && + ( supportedStages == rhs.supportedStages ) && ( supportedOperations == rhs.supportedOperations ) && + ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); +# endif + } + + bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + void * pNext = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == + sizeof( VkPhysicalDeviceSubgroupProperties ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupProperties; + }; + + struct PhysicalDeviceSubgroupSizeControlFeatures + { + using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT + : subgroupSizeControl( subgroupSizeControl_ ) + , computeFullSubgroups( computeFullSubgroups_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( + PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupSizeControlFeatures & + operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlFeatures & + operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & + setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + { + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & + setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) && + ( computeFullSubgroups == 
rhs.computeFullSubgroups ); +# endif + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == + sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlFeatures; + }; + using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; + + struct PhysicalDeviceSubgroupSizeControlProperties + { + using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSubgroupSizeControlProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( + uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT + : minSubgroupSize( minSubgroupSize_ ) + , maxSubgroupSize( maxSubgroupSize_ ) + , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) + , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( + PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupSizeControlProperties & + operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupSizeControlProperties & + operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default; +#else + bool operator==( 
PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && + ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && + ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); +# endif + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) == + sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubgroupSizeControlProperties; + }; + using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; + + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {} ) + VULKAN_HPP_NOEXCEPT : subpassShading( subpassShading_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( + PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingFeaturesHUAWEI( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubpassShadingFeaturesHUAWEI & + operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingFeaturesHUAWEI & + operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & + setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT + { + subpassShading = subpassShading_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassShading ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == + sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI; + }; + + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {} ) VULKAN_HPP_NOEXCEPT + : maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( + PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingPropertiesHUAWEI( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubpassShadingPropertiesHUAWEI & + operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingPropertiesHUAWEI & + operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + void * pNext = {}; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) == + sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI; + }; + + struct PhysicalDeviceSurfaceInfo2KHR + { + using NativeType = VkPhysicalDeviceSurfaceInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT + : surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSurfaceInfo2KHR & + operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & + setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surface ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; +#else + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); +# endif + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == + sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceSurfaceInfo2KHR; + }; + + struct PhysicalDeviceSynchronization2Features + { + using NativeType = VkPhysicalDeviceSynchronization2Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceSynchronization2Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {} ) VULKAN_HPP_NOEXCEPT + : synchronization2( synchronization2_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSynchronization2Features( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSynchronization2Features & + operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2Features & + operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & + setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + { + synchronization2 = synchronization2_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
+      std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, synchronization2 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) ==
+                              sizeof( VkPhysicalDeviceSynchronization2Features ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
+    "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
+  {
+    using Type = PhysicalDeviceSynchronization2Features;
+  };
+  using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
+
+  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : texelBufferAlignment( texelBufferAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
+      PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
+      setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      texelBufferAlignment = texelBufferAlignment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, texelBufferAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + }; + + struct PhysicalDeviceTexelBufferAlignmentProperties + { + using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) + , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) + , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) + , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( + PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTexelBufferAlignmentProperties & + operator=( 
PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentProperties & + operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) == + sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceTexelBufferAlignmentProperties; + }; + using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + + struct PhysicalDeviceTextureCompressionASTCHDRFeatures + { + using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT + : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( + PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTextureCompressionASTCHDRFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTextureCompressionASTCHDRFeatures & + operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTextureCompressionASTCHDRFeatures & + operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureCompressionASTC_HDR ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); +# endif + } + + bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = 
{}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) == + sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures; + }; + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + + struct PhysicalDeviceTimelineSemaphoreFeatures + { + using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) + VULKAN_HPP_NOEXCEPT : timelineSemaphore( timelineSemaphore_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTimelineSemaphoreFeatures & + operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreFeatures & + operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timelineSemaphore ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); +# endif + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == + sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreFeatures; + }; + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + + struct PhysicalDeviceTimelineSemaphoreProperties + { + using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) + VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( + PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTimelineSemaphoreProperties & + operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreProperties & + operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); +# endif + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + void * pNext = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == + sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTimelineSemaphoreProperties; + }; + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + + struct PhysicalDeviceToolProperties + { + using NativeType = VkPhysicalDeviceToolProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( + std::array const & name_ = {}, + std::array const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, + std::array const & description_ = {}, + std::array const & layer_ = {} ) VULKAN_HPP_NOEXCEPT + : name( name_ ) + , version( version_ ) + , purposes( purposes_ ) + , description( description_ ) + , layer( layer_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceToolProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, version, purposes, description, layer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceToolProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && + ( purposes == rhs.purposes ) && ( description == rhs.description ) && ( layer == rhs.layer ); +# endif + } + + bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( 
rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == + sizeof( VkPhysicalDeviceToolProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceToolProperties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceToolProperties; + }; + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + + struct PhysicalDeviceTransformFeedbackFeaturesEXT + { + using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT + : transformFeedback( transformFeedback_ ) + , geometryStreams( geometryStreams_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( + PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT & + operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedback = transformFeedback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + { + geometryStreams = geometryStreams_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transformFeedback, geometryStreams ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && + ( geometryStreams == rhs.geometryStreams ); +# endif + } + + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; + }; + + struct PhysicalDeviceTransformFeedbackPropertiesEXT + { + using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT + : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) + , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) + , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) + , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) + , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) + , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) + , transformFeedbackQueries( transformFeedbackQueries_ ) + , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) + , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) + , transformFeedbackDraw( transformFeedbackDraw_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( + PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackPropertiesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT & + operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxTransformFeedbackStreams, + maxTransformFeedbackBuffers, + maxTransformFeedbackBufferSize, + maxTransformFeedbackStreamDataSize, + maxTransformFeedbackBufferDataSize, + maxTransformFeedbackBufferDataStride, + transformFeedbackQueries, + transformFeedbackStreamsLinesTriangles, + transformFeedbackRasterizationStreamSelect, + transformFeedbackDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && + ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); +# endif + } + + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 
transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) == + sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; + }; + + struct PhysicalDeviceUniformBufferStandardLayoutFeatures + { + using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( + PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceUniformBufferStandardLayoutFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, uniformBufferStandardLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); +# endif + } + + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) == + sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; + }; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + + struct PhysicalDeviceVariablePointersFeatures + { + using NativeType = VkPhysicalDeviceVariablePointersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVariablePointersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT + : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVariablePointersFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVariablePointersFeatures & + operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures & + operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + explicit operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); +# endif + } + + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == + sizeof( VkPhysicalDeviceVariablePointersFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
+  {
+    using Type = PhysicalDeviceVariablePointersFeatures;
+  };
+  using PhysicalDeviceVariablePointerFeatures     = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeaturesKHR  = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_     = {},
+      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
+      , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
+      PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
+      operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
+      operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
+      setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor(
+      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor(
+      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
+             ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+    void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32        vertexAttributeInstanceRateDivisor     = {};
+    VULKAN_HPP_NAMESPACE::Bool32        vertexAttributeInstanceRateZeroDivisor = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ==
+                              sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
+    "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
+    "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+  };
+
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(
+      PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT(
+          *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
+      operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
+      operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this =
+        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxVertexAttribDivisor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto
operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + }; + + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexInputDynamicState( vertexInputDynamicState_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( + PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & + setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + vertexInputDynamicState = vertexInputDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexInputDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexInputDynamicState == rhs.vertexInputDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoProfileKHR + { + using NativeType = VkVideoProfileKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoProfileKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid, + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : videoCodecOperation( videoCodecOperation_ ) + , chromaSubsampling( chromaSubsampling_ ) + , lumaBitDepth( lumaBitDepth_ ) + , chromaBitDepth( chromaBitDepth_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoProfileKHR( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileKHR( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfileKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoProfileKHR & operator=( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfileKHR & operator=( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setVideoCodecOperation( + 
VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT + { + videoCodecOperation = videoCodecOperation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setChromaSubsampling( + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT + { + chromaSubsampling = chromaSubsampling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & + setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + lumaBitDepth = lumaBitDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & + setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT + { + chromaBitDepth = chromaBitDepth_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfileKHR const & ) const = default; +# else + bool operator==( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) && + ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && + ( chromaBitDepth == rhs.chromaBitDepth ); +# endif + } + + bool operator!=( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid; + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileKHR ) == sizeof( VkVideoProfileKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoProfileKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoProfileKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoProfilesKHR + { + using NativeType = VkVideoProfilesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoProfilesKHR( uint32_t profileCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ = {} ) VULKAN_HPP_NOEXCEPT + : profileCount( profileCount_ ) + , pProfiles( pProfiles_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoProfilesKHR( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfilesKHR( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoProfilesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoProfilesKHR & operator=( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoProfilesKHR & operator=( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT + { + profileCount = profileCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & + setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT + { + pProfiles = pProfiles_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, profileCount, pProfiles ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoProfilesKHR const & ) const = default; +# else + bool operator==( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && + ( pProfiles == rhs.pProfiles ); +# endif + } + + bool operator!=( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfilesKHR; + void * pNext = {}; + uint32_t profileCount = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfilesKHR ) == sizeof( VkVideoProfilesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoProfilesKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoProfilesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceVideoFormatInfoKHR + { + using NativeType = VkPhysicalDeviceVideoFormatInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles_ = {} ) VULKAN_HPP_NOEXCEPT + : imageUsage( imageUsage_ ) + , pVideoProfiles( pVideoProfiles_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVideoFormatInfoKHR & + operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageUsage, pVideoProfiles ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default; +# else + bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ) && + ( pVideoProfiles == rhs.pVideoProfiles ); +# endif + } + + bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == + sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoFormatInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceVulkan11Features + { + using NativeType = VkPhysicalDeviceVulkan11Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) + , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) + , storagePushConstant16( storagePushConstant16_ ) + , storageInputOutput16( storageInputOutput16_ ) + , multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + , protectedMemory( protectedMemory_ ) + , samplerYcbcrConversion( samplerYcbcrConversion_ ) + , shaderDrawParameters( shaderDrawParameters_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan11Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan11Features & + operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceVulkan11Features & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + storageBuffer16BitAccess, + uniformAndStorageBuffer16BitAccess, + storagePushConstant16, + storageInputOutput16, + multiview, + multiviewGeometryShader, + multiviewTessellationShader, + variablePointersStorageBuffer, + variablePointers, + protectedMemory, + samplerYcbcrConversion, + shaderDrawParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && + ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == 
rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ) && ( protectedMemory == rhs.protectedMemory ) && + ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) && + ( shaderDrawParameters == rhs.shaderDrawParameters ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == + sizeof( VkPhysicalDeviceVulkan11Features ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan11Features; + }; + + struct PhysicalDeviceVulkan11Properties + { + using NativeType = VkPhysicalDeviceVulkan11Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( + std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceUUID( deviceUUID_ ) + , driverUUID( driverUUID_ ) + , deviceLUID( deviceLUID_ ) + , deviceNodeMask( deviceNodeMask_ ) + , deviceLUIDValid( deviceLUIDValid_ ) + , subgroupSize( subgroupSize_ ) + , subgroupSupportedStages( subgroupSupportedStages_ ) + , subgroupSupportedOperations( subgroupSupportedOperations_ ) + , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ) + , pointClippingBehavior( pointClippingBehavior_ ) + , maxMultiviewViewCount( maxMultiviewViewCount_ ) + , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + , protectedNoFault( protectedNoFault_ ) + , maxPerSetDescriptors( maxPerSetDescriptors_ ) + , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan11Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan11Properties & + operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, + uint32_t const &, + uint32_t const &, + 
VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + deviceUUID, + driverUUID, + deviceLUID, + deviceNodeMask, + deviceLUIDValid, + subgroupSize, + subgroupSupportedStages, + subgroupSupportedOperations, + subgroupQuadOperationsInAllStages, + pointClippingBehavior, + maxMultiviewViewCount, + maxMultiviewInstanceIndex, + protectedNoFault, + maxPerSetDescriptors, + maxMemoryAllocationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && + ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && + ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && + ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) && + ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) && + ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && + ( pointClippingBehavior == rhs.pointClippingBehavior ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) && + ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = + VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == + sizeof( VkPhysicalDeviceVulkan11Properties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan11Properties; + }; + + struct PhysicalDeviceVulkan12Features + { + using NativeType = VkPhysicalDeviceVulkan12Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT + : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ) + , drawIndirectCount( drawIndirectCount_ ) + , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) + , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) + , storagePushConstant8( storagePushConstant8_ ) + , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) + , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + , shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + , descriptorIndexing( descriptorIndexing_ ) + , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + , samplerFilterMinmax( samplerFilterMinmax_ ) + , scalarBlockLayout( scalarBlockLayout_ ) + , imagelessFramebuffer( imagelessFramebuffer_ ) + , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + , hostQueryReset( hostQueryReset_ ) + , timelineSemaphore( timelineSemaphore_ ) + , bufferDeviceAddress( bufferDeviceAddress_ ) + , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) + , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + , vulkanMemoryModel( vulkanMemoryModel_ ) + , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) + , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) + , shaderOutputViewportIndex( shaderOutputViewportIndex_ ) + , 
shaderOutputLayer( shaderOutputLayer_ ) + , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan12Features & + operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT + { + samplerMirrorClampToEdge = samplerMirrorClampToEdge_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT + { + drawIndirectCount = drawIndirectCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT + { + descriptorIndexing = descriptorIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + 
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 
descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT + { + samplerFilterMinmax = samplerFilterMinmax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) 
VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT + { + shaderOutputViewportIndex = shaderOutputViewportIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT + { + shaderOutputLayer = shaderOutputLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT + { + subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + explicit operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + samplerMirrorClampToEdge, + drawIndirectCount, + storageBuffer8BitAccess, + uniformAndStorageBuffer8BitAccess, + storagePushConstant8, + shaderBufferInt64Atomics, + shaderSharedInt64Atomics, + shaderFloat16, + shaderInt8, + descriptorIndexing, + shaderInputAttachmentArrayDynamicIndexing, + shaderUniformTexelBufferArrayDynamicIndexing, + shaderStorageTexelBufferArrayDynamicIndexing, + shaderUniformBufferArrayNonUniformIndexing, + shaderSampledImageArrayNonUniformIndexing, + shaderStorageBufferArrayNonUniformIndexing, + shaderStorageImageArrayNonUniformIndexing, + shaderInputAttachmentArrayNonUniformIndexing, + shaderUniformTexelBufferArrayNonUniformIndexing, + shaderStorageTexelBufferArrayNonUniformIndexing, + descriptorBindingUniformBufferUpdateAfterBind, + descriptorBindingSampledImageUpdateAfterBind, + descriptorBindingStorageImageUpdateAfterBind, + descriptorBindingStorageBufferUpdateAfterBind, + descriptorBindingUniformTexelBufferUpdateAfterBind, + descriptorBindingStorageTexelBufferUpdateAfterBind, + descriptorBindingUpdateUnusedWhilePending, + descriptorBindingPartiallyBound, + descriptorBindingVariableDescriptorCount, + runtimeDescriptorArray, + samplerFilterMinmax, + scalarBlockLayout, + imagelessFramebuffer, + uniformBufferStandardLayout, + shaderSubgroupExtendedTypes, + separateDepthStencilLayouts, + hostQueryReset, + timelineSemaphore, + bufferDeviceAddress, + bufferDeviceAddressCaptureReplay, + bufferDeviceAddressMultiDevice, + vulkanMemoryModel, + vulkanMemoryModelDeviceScope, + vulkanMemoryModelAvailabilityVisibilityChains, + shaderOutputViewportIndex, + shaderOutputLayer, + subgroupBroadcastDynamicId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && + ( drawIndirectCount == rhs.drawIndirectCount ) && + ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && + ( storagePushConstant8 == rhs.storagePushConstant8 ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && ( shaderFloat16 == rhs.shaderFloat16 ) && + ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == 
rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == + rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == + rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == + rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == + rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && + ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && ( scalarBlockLayout == rhs.scalarBlockLayout ) && + ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && + ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && + ( hostQueryReset == rhs.hostQueryReset ) && ( timelineSemaphore == rhs.timelineSemaphore ) && + ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && + ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && + ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == + sizeof( VkPhysicalDeviceVulkan12Features ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
+  {
+    using Type = PhysicalDeviceVulkan12Features;
+  };
+
+  struct PhysicalDeviceVulkan12Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(
+      VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+      VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ =
+        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ =
+        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
+      uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+      uint32_t maxPerStageUpdateAfterBindResources_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+      uint32_t
maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT + : driverID( driverID_ ) + , driverName( driverName_ ) + , driverInfo( driverInfo_ ) + , conformanceVersion( conformanceVersion_ ) + , denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( 
maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + , supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan12Properties & + operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ConformanceVersion const &, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + uint32_t const &, + 
uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, + VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint64_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + driverID, + driverName, + driverInfo, + conformanceVersion, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments, + supportedDepthResolveModes, + supportedStencilResolveModes, + independentResolveNone, + independentResolve, + filterMinmaxSingleComponentFormats, + filterMinmaxImageComponentMapping, + maxTimelineSemaphoreValueDifference, + framebufferIntegerColorSampleCounts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && + ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) && + ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == 
rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && + ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && + ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && + ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && + ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && + ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == + rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == + rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == + rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == + rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == + rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && + ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == + rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == + rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == + rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == + rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == + rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == + rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( 
supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && + ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; 
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == + sizeof( VkPhysicalDeviceVulkan12Properties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan12Properties; + }; + + struct PhysicalDeviceVulkan13Features + { + using NativeType = VkPhysicalDeviceVulkan13Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {} ) VULKAN_HPP_NOEXCEPT + : robustImageAccess( robustImageAccess_ ) + , inlineUniformBlock( inlineUniformBlock_ ) + , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) + , pipelineCreationCacheControl( pipelineCreationCacheControl_ ) + , privateData( privateData_ ) + , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + , shaderTerminateInvocation( shaderTerminateInvocation_ ) + , subgroupSizeControl( subgroupSizeControl_ ) + , computeFullSubgroups( computeFullSubgroups_ ) + , synchronization2( synchronization2_ ) + , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) 
+ , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) + , dynamicRendering( dynamicRendering_ ) + , shaderIntegerDotProduct( shaderIntegerDotProduct_ ) + , maintenance4( maintenance4_ ) + {} + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan13Features( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan13Features & + operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT + { + privateData = privateData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderDemoteToHelperInvocation( + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderTerminateInvocation = shaderTerminateInvocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + { + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + { + synchronization2 = synchronization2_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderZeroInitializeWorkgroupMemory( + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT + { + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRendering = dynamicRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerDotProduct = shaderIntegerDotProduct_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT + { + maintenance4 = maintenance4_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + robustImageAccess, + inlineUniformBlock, + descriptorBindingInlineUniformBlockUpdateAfterBind, + pipelineCreationCacheControl, + privateData, + shaderDemoteToHelperInvocation, + shaderTerminateInvocation, + subgroupSizeControl, + computeFullSubgroups, + synchronization2, + textureCompressionASTC_HDR, + shaderZeroInitializeWorkgroupMemory, + dynamicRendering, + shaderIntegerDotProduct, + maintenance4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) && + ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == + rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) && + ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && + ( privateData == rhs.privateData ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && + ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) && + ( subgroupSizeControl == rhs.subgroupSizeControl ) && + ( computeFullSubgroups == rhs.computeFullSubgroups ) && ( synchronization2 == rhs.synchronization2 ) && + ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) && + ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && + ( dynamicRendering == rhs.dynamicRendering ) && + ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan13Features 
const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
+    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) ==
+                              sizeof( VkPhysicalDeviceVulkan13Features ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
+    "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
+  {
+    using Type = PhysicalDeviceVulkan13Features;
+  };
+
+  struct PhysicalDeviceVulkan13Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan13Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(
+      uint32_t minSubgroupSize_ = {},
+      uint32_t maxSubgroupSize_ = {},
+      uint32_t maxComputeWorkgroupSubgroups_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
+      uint32_t maxInlineUniformBlockSize_ = {},
+      uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
+      uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
+      uint32_t maxInlineUniformTotalSize_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32
integerDotProduct32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {} ) VULKAN_HPP_NOEXCEPT + : minSubgroupSize( minSubgroupSize_ ) + , maxSubgroupSize( maxSubgroupSize_ ) + , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) + , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) + , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + , maxInlineUniformTotalSize( maxInlineUniformTotalSize_ ) + , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) + , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) + , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) + , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) + , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) + , 
integerDotProduct4x8BitPackedMixedSignednessAccelerated( + integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) + , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) + , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) + , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) + , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) + , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) + , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) + , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ) + , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) + , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitSignedAccelerated( + integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating16BitSignedAccelerated( + integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating32BitSignedAccelerated( + integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating64BitSignedAccelerated( + integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) + , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) + , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) + , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) + , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) + , maxBufferSize( maxBufferSize_ ) + {} + + 
VULKAN_HPP_CONSTEXPR + PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan13Properties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan13Properties & + operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minSubgroupSize, + maxSubgroupSize, + maxComputeWorkgroupSubgroups, + requiredSubgroupSizeStages, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks, + maxInlineUniformTotalSize, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment, + maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceVulkan13Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && + ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && + ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == + rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == + rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) && + ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) && + ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == + rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == + rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == + rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == + rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == + rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( 
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) && + ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && + ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxInlineUniformTotalSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 
integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == + sizeof( VkPhysicalDeviceVulkan13Properties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
+  {
+    using Type = PhysicalDeviceVulkan13Properties;
+  };
+
+  struct PhysicalDeviceVulkanMemoryModelFeatures
+  {
+    using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_                             = {},
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_                  = {},
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vulkanMemoryModel( vulkanMemoryModel_ )
+      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
+      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkanMemoryModelFeatures(
+          *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkanMemoryModelFeatures &
+      operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkanMemoryModelFeatures &
+      operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
+      setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
+      setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains(
+      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
+    }
+
+    explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie(
+        sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
+#else
+    bool operator==(
PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); +# endif + } + + bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == + sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkanMemoryModelFeatures; + }; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR + { + using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ) + , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ) + , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ) + , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( 
VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayoutScalarBlockLayout( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout8BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout16BitAccess( + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + workgroupMemoryExplicitLayout, + workgroupMemoryExplicitLayoutScalarBlockLayout, + workgroupMemoryExplicitLayout8BitAccess, + workgroupMemoryExplicitLayout16BitAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && + ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && + ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && + ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); +# endif + } + + bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == + sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, + "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + }; + + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT + { + using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {} ) VULKAN_HPP_NOEXCEPT + : ycbcr2plane444Formats( ycbcr2plane444Formats_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT + { + ycbcr2plane444Formats = ycbcr2plane444Formats_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcr2plane444Formats ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + }; + + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + { + using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT : ycbcrImageArrays( ycbcrImageArrays_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( + PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrImageArraysFeaturesEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & + operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & + setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrImageArrays = ycbcrImageArrays_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 
14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcrImageArrays ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; + }; + + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures + { + using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( + VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) + VULKAN_HPP_NOEXCEPT + { + shaderZeroInitializeWorkgroupMemory = 
shaderZeroInitializeWorkgroupMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ); +# endif + } + + bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) == + sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, + "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + }; + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + + struct PipelineCacheCreateInfo + { + using NativeType = VkPipelineCacheCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) + : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo & + setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCacheCreateInfo const & ) const = default; 
+#else + bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData ); +# endif + } + + bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == + sizeof( VkPipelineCacheCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCacheCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineCacheCreateInfo; + }; + + struct PipelineCacheHeaderVersionOne + { + using NativeType = VkPipelineCacheHeaderVersionOne; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( + uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + std::array const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize( headerSize_ ) + , headerVersion( headerVersion_ ) + , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheHeaderVersionOne( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCacheHeaderVersionOne & + operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT + { + headerSize = headerSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & + setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT + { + headerVersion = headerVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT + { + vendorID = vendorID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT + { + deviceID = deviceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & + setPipelineCacheUUID( std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCacheUUID = pipelineCacheUUID_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne *>( this );
+    }
+
+    explicit operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCacheHeaderVersionOne *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &,
+               VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+#  endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default;
+#else
+    bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) &&
+             ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+             ( pipelineCacheUUID == rhs.pipelineCacheUUID );
+#  endif
+    }
+
+    bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t                                         headerSize    = {};
+    VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion =
+      VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
+    uint32_t                                                     vendorID          = {};
+    uint32_t                                                     deviceID          = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE>  pipelineCacheUUID = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) ==
+                              sizeof( VkPipelineCacheHeaderVersionOne ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value,
+    "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!"
); + + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) + VULKAN_HPP_NOEXCEPT + : srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( + PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAdvancedStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT & + operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && 
+ ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap ); +# endif + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) == + sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; + }; + + struct PipelineColorWriteCreateInfoEXT + { + using NativeType = VkPipelineColorWriteCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentCount( attachmentCount_ ) + , pColorWriteEnables( pColorWriteEnables_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) + : attachmentCount( static_cast( colorWriteEnables_.size() ) ) + , pColorWriteEnables( colorWriteEnables_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorWriteCreateInfoEXT & + operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & + setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & + setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + { + pColorWriteEnables = pColorWriteEnables_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT & setColorWriteEnables( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( colorWriteEnables_.size() ); + pColorWriteEnables = colorWriteEnables_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentCount, pColorWriteEnables ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && + ( pColorWriteEnables == rhs.pColorWriteEnables ); +# endif + } + + bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == + sizeof( VkPipelineColorWriteCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineColorWriteCreateInfoEXT; + }; + + struct PipelineCompilerControlCreateInfoAMD + { + using NativeType = VkPipelineCompilerControlCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCompilerControlCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : compilerControlFlags( compilerControlFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCompilerControlCreateInfoAMD & + operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCompilerControlCreateInfoAMD & + operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + { + compilerControlFlags = compilerControlFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compilerControlFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default; +#else + bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); +# endif + } + + bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == + sizeof( VkPipelineCompilerControlCreateInfoAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineCompilerControlCreateInfoAMD; + }; + + struct PipelineCoverageModulationStateCreateInfoNV + { + using NativeType = VkPipelineCoverageModulationStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageModulationStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float * pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( coverageModulationTableCount_ ) + , pCoverageModulationTable( pCoverageModulationTable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageModulationStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( static_cast( coverageModulationTable_.size() ) ) + , pCoverageModulationTable( coverageModulationTable_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCoverageModulationStateCreateInfoNV & + operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageModulationStateCreateInfoNV & + operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationMode = coverageModulationMode_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableEnable = coverageModulationTableEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + pCoverageModulationTable = pCoverageModulationTable_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); + pCoverageModulationTable = coverageModulationTable_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + coverageModulationMode, + coverageModulationTableEnable, + coverageModulationTableCount, + pCoverageModulationTable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageModulationMode == rhs.coverageModulationMode ) && + ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && + ( coverageModulationTableCount == rhs.coverageModulationTableCount ) && + ( pCoverageModulationTable == rhs.pCoverageModulationTable ); +# endif + } + + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; + uint32_t coverageModulationTableCount = {}; + const float * pCoverageModulationTable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == + sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineCoverageModulationStateCreateInfoNV; + }; + + struct PipelineCoverageReductionStateCreateInfoNV + { + using NativeType = VkPipelineCoverageReductionStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageReductionStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageReductionMode( coverageReductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( + PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageReductionStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCoverageReductionStateCreateInfoNV & + operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageReductionStateCreateInfoNV & + operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, coverageReductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageReductionMode == rhs.coverageReductionMode ); +# endif + } + + bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == + sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineCoverageReductionStateCreateInfoNV; + }; + + struct PipelineCoverageToColorStateCreateInfoNV + { + using NativeType = VkPipelineCoverageToColorStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCoverageToColorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, + uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageToColorEnable( coverageToColorEnable_ ) + , coverageToColorLocation( coverageToColorLocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( + PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineCoverageToColorStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCoverageToColorStateCreateInfoNV & + operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageToColorStateCreateInfoNV & + operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageToColorEnable = coverageToColorEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + { + 
coverageToColorLocation = coverageToColorLocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( coverageToColorEnable == rhs.coverageToColorEnable ) && + ( coverageToColorLocation == rhs.coverageToColorLocation ); +# endif + } + + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; + uint32_t coverageToColorLocation = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == + sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageToColorStateCreateInfoNV;
+  };
+
+  struct PipelineCreationFeedback
+  {
+    using NativeType = VkPipelineCreationFeedback;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {},
+                                                   uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , duration( duration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedback *>( this );
+    }
+
+    explicit operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedback *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags const &, uint64_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( flags, duration );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCreationFeedback const & ) const = default;
+#else
+    bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( flags == rhs.flags ) && ( duration == rhs.duration );
+#  endif
+    }
+
+    bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags    = {};
+    uint64_t                                            duration = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) ==
+                              sizeof( VkPipelineCreationFeedback ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value,
+                            "PipelineCreationFeedback is not nothrow_move_constructible!"
); + using PipelineCreationFeedbackEXT = PipelineCreationFeedback; + + struct PipelineCreationFeedbackCreateInfo + { + using NativeType = VkPipelineCreationFeedbackCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineCreationFeedbackCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, + uint32_t pipelineStageCreationFeedbackCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) + , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedbackCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pipelineStageCreationFeedbacks_ ) + : pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ) + , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCreationFeedbackCreateInfo & + operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreationFeedbackCreateInfo & + operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineCreationFeedback( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineCreationFeedback = pPipelineCreationFeedback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); + 
pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default; +#else + bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) && + ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && + ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); +# endif + } + + bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {}; + uint32_t pipelineStageCreationFeedbackCount = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == + sizeof( VkPipelineCreationFeedbackCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineCreationFeedbackCreateInfo; + }; + using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; + + struct PipelineDiscardRectangleStateCreateInfoEXT + { + using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( discardRectangleCount_ ) + , pDiscardRectangles( pDiscardRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineDiscardRectangleStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( static_cast( discardRectangles_.size() ) ) + , pDiscardRectangles( discardRectangles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineDiscardRectangleStateCreateInfoEXT & + operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDiscardRectangleStateCreateInfoEXT & + operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleMode = discardRectangleMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = discardRectangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT 
+ { + pDiscardRectangles = pDiscardRectangles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) + VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = static_cast( discardRectangles_.size() ); + pDiscardRectangles = discardRectangles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( discardRectangleMode == rhs.discardRectangleMode ) && + ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); +# endif + } + + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; + uint32_t discardRectangleCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == + sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineDiscardRectangleStateCreateInfoEXT; + }; + + struct PipelineExecutableInfoKHR + { + using NativeType = VkPipelineExecutableInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : pipeline( pipeline_ ) + , executableIndex( executableIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & + setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & + setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + { + executableIndex = executableIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline, executableIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; +#else + bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && + ( executableIndex == rhs.executableIndex ); +# endif + } + + bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + uint32_t executableIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == + sizeof( VkPipelineExecutableInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineExecutableInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineExecutableInfoKHR; + }; + + struct PipelineExecutableInternalRepresentationKHR + { + using NativeType = VkPipelineExecutableInternalRepresentationKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineExecutableInternalRepresentationKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutableInternalRepresentationKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, + size_t dataSize_ = {}, + void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : name( name_ ) + , description( description_ ) + , isText( isText_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( + PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineExecutableInternalRepresentationKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineExecutableInternalRepresentationKHR( std::array const & name_, + std::array const & description_, + VULKAN_HPP_NAMESPACE::Bool32 isText_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) + : name( name_ ) + , description( description_ ) + , isText( isText_ ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineExecutableInternalRepresentationKHR & + operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableInternalRepresentationKHR & + operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + size_t const &, + void * const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, description, isText, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default; +#else + bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && + ( description == rhs.description ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && + ( pData == rhs.pData ); +# endif + } + + bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::Bool32 isText = {}; + size_t dataSize = {}; + void * pData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) == + sizeof( VkPipelineExecutableInternalRepresentationKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineExecutableInternalRepresentationKHR; + }; + + struct PipelineExecutablePropertiesKHR + { + using NativeType = VkPipelineExecutablePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, + std::array const & name_ = {}, + std::array const & description_ = {}, + uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : stages( stages_ ) + , name( name_ ) + , description( description_ ) + , subgroupSize( subgroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutablePropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineExecutablePropertiesKHR & + operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stages, name, description, subgroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default; +#else + bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && + ( description == rhs.description ) && ( subgroupSize == rhs.subgroupSize ); +# endif + } + + bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t subgroupSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == + sizeof( VkPipelineExecutablePropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineExecutablePropertiesKHR; + }; + + union PipelineExecutableStatisticValueKHR + { + using NativeType = VkPipelineExecutableStatisticValueKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & + setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT + { + b32 = b32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT + { + i64 = i64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT + { + u64 = u64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT + { + f64 = f64_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkPipelineExecutableStatisticValueKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableStatisticValueKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::Bool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#else + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PipelineExecutableStatisticKHR + { + using NativeType = VkPipelineExecutableStatisticKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( + std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT + : name( name_ ) + , description( description_ ) + , format( format_ ) + , value( value_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutableStatisticKHR( 
PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableStatisticKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineExecutableStatisticKHR & + operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, description, format, value ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == + sizeof( VkPipelineExecutableStatisticKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineExecutableStatisticKHR; + }; + + struct PipelineFragmentShadingRateEnumStateCreateInfoNV + { + using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, + VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT + : shadingRateType( shadingRateType_ ) + , shadingRate( shadingRate_ ) + , combinerOps( combinerOps_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateEnumStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineFragmentShadingRateEnumStateCreateInfoNV & + operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateEnumStateCreateInfoNV & + operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateType = shadingRateType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( + std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && + ( shadingRate == rhs.shadingRate ) && ( combinerOps == rhs.combinerOps ); +# endif + } + + bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) == + sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; + }; + + struct PipelineFragmentShadingRateStateCreateInfoKHR + { + using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, + VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT + : fragmentSize( fragmentSize_ ) + , combinerOps( combinerOps_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateStateCreateInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineFragmentShadingRateStateCreateInfoKHR & + operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineFragmentShadingRateStateCreateInfoKHR & + operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineFragmentShadingRateStateCreateInfoKHR & + setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT + { + fragmentSize = fragmentSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( + std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentSize, combinerOps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && + ( combinerOps == rhs.combinerOps ); +# endif + } + + bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) == + sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
+  {
+    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
+  };
+
+  struct PipelineInfoKHR
+  {
+    using NativeType = VkPipelineInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipeline( pipeline_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR &
+                            setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInfoKHR *>( this );
+    }
+
+    explicit operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInfoKHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::
+      tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipeline );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
+#  endif
+    }
+
+    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::ePipelineInfoKHR;
+    const void *                        pNext    = {};
+    VULKAN_HPP_NAMESPACE::Pipeline      pipeline = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value,
+                            "PipelineInfoKHR is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
+  {
+    using Type = PipelineInfoKHR;
+  };
+
+  struct PushConstantRange
+  {
+    using NativeType = VkPushConstantRange;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+                                            uint32_t offset_ = {},
+                                            uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stageFlags( stageFlags_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange &
+                            setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange *>( this );
+    }
+
+    explicit operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( stageFlags, offset, size );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PushConstantRange const & ) const = default;
+#else
+    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
+#  endif
+    }
+
+    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t                               offset     = {};
+    uint32_t                               size       = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value,
+                            "PushConstantRange is not nothrow_move_constructible!"
); + + struct PipelineLayoutCreateInfo + { + using NativeType = VkPipelineLayoutCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , setLayoutCount( setLayoutCount_ ) + , pSetLayouts( pSetLayouts_ ) + , pushConstantRangeCount( pushConstantRangeCount_ ) + , pPushConstantRanges( pPushConstantRanges_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pushConstantRanges_ = {} ) + : flags( flags_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = setLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setSetLayouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + 
pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; +#else + bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ); +# endif + } + + bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == + sizeof( VkPipelineLayoutCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineLayoutCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineLayoutCreateInfo; + }; + + struct PipelineLibraryCreateInfoKHR + { + using NativeType = VkPipelineLibraryCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT + : libraryCount( libraryCount_ ) + , pLibraries( pLibraries_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + : libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & + setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & setLibraries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) + VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, libraryCount, pLibraries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && + ( pLibraries == rhs.pLibraries ); +# endif + } + + bool operator!=( PipelineLibraryCreateInfoKHR const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == + sizeof( VkPipelineLibraryCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineLibraryCreateInfoKHR; + }; + + struct PipelineRasterizationConservativeStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , conservativeRasterizationMode( conservativeRasterizationMode_ ) + , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT( + VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationConservativeStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + conservativeRasterizationMode = conservativeRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setExtraPrimitiveOverestimationSize( float 
extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + { + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && + ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); +# endif + } + + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; + float extraPrimitiveOverestimationSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, + "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationConservativeStateCreateInfoEXT; + }; + + struct PipelineRasterizationDepthClipStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , depthClipEnable( depthClipEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( + PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationDepthClipStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT & + operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( depthClipEnable == rhs.depthClipEnable ); +# endif + } + + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: 
+ VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; + }; + + struct PipelineRasterizationLineStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT + : lineRasterizationMode( lineRasterizationMode_ ) + , stippledLineEnable( stippledLineEnable_ ) + , lineStippleFactor( lineStippleFactor_ ) + , lineStipplePattern( lineStipplePattern_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationLineStateCreateInfoEXT & + operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT & + operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + lineRasterizationMode = lineRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + { + stippledLineEnable = stippledLineEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + { + lineStippleFactor = lineStippleFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineRasterizationLineStateCreateInfoEXT & + setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + { + lineStipplePattern = lineStipplePattern_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); +# endif + } + + bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationLineStateCreateInfoEXT; + }; + + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex ) VULKAN_HPP_NOEXCEPT + : provokingVertexMode( provokingVertexMode_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationProvokingVertexStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & + setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexMode = provokingVertexMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode ); +# endif + } + + bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + const void 
* pNext = {}; + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == + sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, + "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT; + }; + + struct PipelineRasterizationStateRasterizationOrderAMD + { + using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT + : rasterizationOrder( rasterizationOrder_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateRasterizationOrderAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationStateRasterizationOrderAMD & + operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateRasterizationOrderAMD & + operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & + setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrder = rasterizationOrder_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rasterizationOrder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default; +#else + bool operator==( 
PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder ); +# endif + } + + bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) == + sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineRasterizationStateRasterizationOrderAMD; + }; + + struct PipelineRasterizationStateStreamCreateInfoEXT + { + using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, + uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rasterizationStream( rasterizationStream_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( + PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateStreamCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationStateStreamCreateInfoEXT & + operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateStreamCreateInfoEXT & + operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationStream = rasterizationStream_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rasterizationStream ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rasterizationStream == rhs.rasterizationStream ); +# endif + } + + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) == + sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineRasterizationStateStreamCreateInfoEXT; + }; + + struct PipelineRenderingCreateInfo + { + using NativeType = VkPipelineRenderingCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) + VULKAN_HPP_NOEXCEPT + : viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentFormats( pColorAttachmentFormats_ ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + {} + + VULKAN_HPP_CONSTEXPR + PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRenderingCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo( + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) + : viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = 
colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + viewMask, + colorAttachmentCount, + pColorAttachmentFormats, + depthAttachmentFormat, + stencilAttachmentFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRenderingCreateInfo const & ) const = default; +#else + bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && + ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); +# endif + } + + bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == + sizeof( VkPipelineRenderingCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineRenderingCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineRenderingCreateInfo; + }; + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + + struct PipelineRepresentativeFragmentTestStateCreateInfoNV + { + using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( + PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV( + VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRepresentativeFragmentTestStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTestEnable = representativeFragmentTestEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTestEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); +# endif + } + + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) == + sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible< + VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, + "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; + }; + + struct PipelineSampleLocationsStateCreateInfoEXT + { + using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationsEnable( sampleLocationsEnable_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( + PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineSampleLocationsStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineSampleLocationsStateCreateInfoEXT & + operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT & + operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
sampleLocationsEnable, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == + sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineSampleLocationsStateCreateInfoEXT; + }; + + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo + { + using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : requiredSubgroupSize( requiredSubgroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( + PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineShaderStageRequiredSubgroupSizeCreateInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageRequiredSubgroupSizeCreateInfo & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfo & + operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, requiredSubgroupSize ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default; +#else + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); +# endif + } + + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; + void * pNext = {}; + uint32_t requiredSubgroupSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) == + sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + }; + using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + + struct PipelineTessellationDomainOriginStateCreateInfo + { + using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT : domainOrigin( domainOrigin_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineTessellationDomainOriginStateCreateInfo( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineTessellationDomainOriginStateCreateInfo & + operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationDomainOriginStateCreateInfo & + operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & + setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT + { + domainOrigin = domainOrigin_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, domainOrigin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); +# endif + } + + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) == + sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineTessellationDomainOriginStateCreateInfo; + }; + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + + struct VertexInputBindingDivisorDescriptionEXT + { + using NativeType = VkVertexInputBindingDivisorDescriptionEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {}, + uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , divisor( divisor_ ) + {} + + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDivisorDescriptionEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputBindingDivisorDescriptionEXT & + operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDivisorDescriptionEXT & + operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & + setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & + setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, divisor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default; +#else + bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( divisor == rhs.divisor ); +# endif + } + + bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + uint32_t divisor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) == + sizeof( VkVertexInputBindingDivisorDescriptionEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" 
); + + struct PipelineVertexInputDivisorStateCreateInfoEXT + { + using NativeType = VkPipelineVertexInputDivisorStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {} ) + VULKAN_HPP_NOEXCEPT + : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) + , pVertexBindingDivisors( pVertexBindingDivisors_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( + PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) + : vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ) + , pVertexBindingDivisors( vertexBindingDivisors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoEXT & + operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & + setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = vertexBindingDivisorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT + { + pVertexBindingDivisors = pVertexBindingDivisors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); +# endif + } + + bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) == + sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineVertexInputDivisorStateCreateInfoEXT; + }; + + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + { + using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( customSampleOrderCount_ ) + , pCustomSampleOrders( pCustomSampleOrders_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportCoarseSampleOrderStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + customSampleOrders_ ) + : sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) + , pCustomSampleOrders( 
customSampleOrders_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + { + sampleOrderType = sampleOrderType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = customSampleOrderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + pCustomSampleOrders = pCustomSampleOrders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + customSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && + ( pCustomSampleOrders == rhs.pCustomSampleOrders ); +# endif + } + + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = + 
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == + sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; + }; + + struct PipelineViewportDepthClipControlCreateInfoEXT + { + using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( + VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {} ) VULKAN_HPP_NOEXCEPT : negativeOneToOne( negativeOneToOne_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( + PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportDepthClipControlCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportDepthClipControlCreateInfoEXT & + operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportDepthClipControlCreateInfoEXT & + operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & + setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT + { + negativeOneToOne = negativeOneToOne_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, negativeOneToOne ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne ); +# endif + } + + bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) == + sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PipelineViewportDepthClipControlCreateInfoEXT; + }; + + struct PipelineViewportExclusiveScissorStateCreateInfoNV + { + using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT + : exclusiveScissorCount( exclusiveScissorCount_ ) + , pExclusiveScissors( pExclusiveScissors_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( + PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportExclusiveScissorStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + : exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ) + , pExclusiveScissors( exclusiveScissors_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = exclusiveScissorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) 
VULKAN_HPP_NOEXCEPT + { + pExclusiveScissors = pExclusiveScissors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) + VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && ( pExclusiveScissors == rhs.pExclusiveScissors ); +# endif + } + + bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) == + sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; + }; + + struct ShadingRatePaletteNV + { + using NativeType = VkShadingRatePaletteNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( + uint32_t shadingRatePaletteEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) + , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + {} + + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) + , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & + setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setPShadingRatePaletteEntries( + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV & setShadingRatePaletteEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShadingRatePaletteNV const & ) const = default; +#else + bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && + ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); +# endif + } + + bool operator!=( 
ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShadingRatePaletteNV is not nothrow_move_constructible!" ); + + struct PipelineViewportShadingRateImageStateCreateInfoNV + { + using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( viewportCount_ ) + , pShadingRatePalettes( pShadingRatePalettes_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( + PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportShadingRateImageStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePalettes_ ) + : shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( static_cast( shadingRatePalettes_.size() ) ) + , pShadingRatePalettes( shadingRatePalettes_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = + *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImageEnable = shadingRateImageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( + 
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePalettes = pShadingRatePalettes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( shadingRatePalettes_.size() ); + pShadingRatePalettes = shadingRatePalettes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && ( viewportCount == rhs.viewportCount ) && + ( pShadingRatePalettes == rhs.pShadingRatePalettes ); +# endif + } + + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) == + sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineViewportShadingRateImageStateCreateInfoNV; + }; + + struct ViewportSwizzleNV + { + using NativeType = VkViewportSwizzleNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , z( z_ ) + , w( w_ ) + {} + + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ViewportSwizzleNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & + setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & + setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & + setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & + setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT + { + w = w_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, z, w ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportSwizzleNV const & ) const = default; +#else + bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w ); +# endif + } + + bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = 
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ViewportSwizzleNV is not nothrow_move_constructible!" ); + + struct PipelineViewportSwizzleStateCreateInfoNV + { + using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewportSwizzles( pViewportSwizzles_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( + PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PipelineViewportSwizzleStateCreateInfoNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + viewportSwizzles_ ) + : flags( flags_ ) + , viewportCount( static_cast( viewportSwizzles_.size() ) ) + , pViewportSwizzles( viewportSwizzles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportSwizzleStateCreateInfoNV & + operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportSwizzleStateCreateInfoNV & + operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + pViewportSwizzles = pViewportSwizzles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportSwizzles_.size() ); + pViewportSwizzles = 
viewportSwizzles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( viewportCount == rhs.viewportCount ) && ( pViewportSwizzles == rhs.pViewportSwizzles ); +# endif + } + + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == + sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" 
+  );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
+  {
+    using Type = PipelineViewportSwizzleStateCreateInfoNV;
+  };
+
+  struct ViewportWScalingNV
+  {
+    using NativeType = VkViewportWScalingNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
+      : xcoeff( xcoeff_ )
+      , ycoeff( ycoeff_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV *>( this );
+    }
+
+    explicit operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportWScalingNV *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<float const &, float const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( xcoeff, ycoeff );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ViewportWScalingNV const & ) const = default;
+#else
+    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff );
+# endif
+    }
+
+    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float xcoeff = {};
+    float ycoeff = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value,
+                            "ViewportWScalingNV is not nothrow_move_constructible!" );
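+  // NOTE: editorial usage sketch, not part of the generated Vulkan-Hpp header. Assuming the
+  // default VULKAN_HPP_NAMESPACE of "vk" and a device created with VK_NV_clip_space_w_scaling
+  // enabled, the ViewportWScalingNV struct above and the create-info defined below are
+  // typically combined like this:
+  //
+  //   std::array<vk::ViewportWScalingNV, 1> wScalings = { vk::ViewportWScalingNV( 1.0f, 1.0f ) };
+  //   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, wScalings );
+  //   // The ArrayProxy constructor fills viewportCount / pViewportWScalings from the array.
+  //   vk::PipelineViewportStateCreateInfo viewportState;
+  //   viewportState.pNext = &wScalingState;  // chain via pNext when building the pipeline
+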
+  struct PipelineViewportWScalingStateCreateInfoNV
+  {
+    using NativeType = VkPipelineViewportWScalingStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
+      StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::Bool32                     viewportWScalingEnable_ = {},
+      uint32_t                                         viewportCount_          = {},
+      const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_     = {} ) VULKAN_HPP_NOEXCEPT
+      : viewportWScalingEnable( viewportWScalingEnable_ )
+      , viewportCount( viewportCount_ )
+      , pViewportWScalings( pViewportWScalings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
+      PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
+      VULKAN_HPP_NOEXCEPT
+      : PipelineViewportWScalingStateCreateInfoNV(
+          *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportWScalingStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
+        viewportWScalings_ )
+      : viewportWScalingEnable( viewportWScalingEnable_ )
+      , viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) )
+      , pViewportWScalings( viewportWScalings_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineViewportWScalingStateCreateInfoNV &
+      operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineViewportWScalingStateCreateInfoNV &
+      operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
+      setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
+      setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportWScalingEnable = viewportWScalingEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
+      setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
+      setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportWScalings = pViewportWScalings_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
+        viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount      = static_cast<uint32_t>( viewportWScalings_.size() );
+      pViewportWScalings = viewportWScalings_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>( this );
+    }
+
+    explicit operator VkPipelineViewportWScalingStateCreateInfoNV &()
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && ( viewportCount == rhs.viewportCount ) && + ( pViewportWScalings == rhs.pViewportWScalings ); +# endif + } + + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == + sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = PipelineViewportWScalingStateCreateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_GGP ) + struct PresentFrameTokenGGP + { + using NativeType = VkPresentFrameTokenGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT + : frameToken( frameToken_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentFrameTokenGGP( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT + { + frameToken = frameToken_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, frameToken ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); + } + + bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; + const void * pNext = {}; + GgpFrameToken frameToken = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentFrameTokenGGP is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
+  {
+    using Type = PresentFrameTokenGGP;
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct PresentIdKHR
+  {
+    using NativeType = VkPresentIdKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {},
+                                       const uint64_t * pPresentIds_ = {} ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pPresentIds( pPresentIds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ )
+      : swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentIdKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPresentIds = pPresentIds_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( presentIds_.size() );
+      pPresentIds = presentIds_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentIdKHR *>( this );
+    }
+
+    explicit operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentIdKHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pPresentIds );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentIdKHR const & ) const = default;
+#else
+    bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
+             ( pPresentIds == rhs.pPresentIds );
+# endif
+    }
+
+    bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const uint64_t * pPresentIds = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value,
+                            "PresentIdKHR is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::ePresentIdKHR>
+  {
+    using Type = PresentIdKHR;
+  };
+
+  struct PresentInfoKHR
+  {
+    using NativeType = VkPresentInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {},
+                                         uint32_t swapchainCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {},
+                                         const uint32_t * pImageIndices_ = {},
+                                         VULKAN_HPP_NAMESPACE::Result * pResults_ = {} ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , swapchainCount( swapchainCount_ )
+      , pSwapchains( pSwapchains_ )
+      , pImageIndices( pImageIndices_ )
+      , pResults( pResults_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentInfoKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {} )
+      : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
+      , pWaitSemaphores( waitSemaphores_.data() )
+      , swapchainCount( static_cast<uint32_t>( swapchains_.size() ) )
+      , pSwapchains( swapchains_.data() )
+      , pImageIndices( imageIndices_.data() )
+      , pResults( results_.data() )
+    {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
+      VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
+      VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
+# else
+      if ( swapchains_.size() != imageIndices_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
+      }
+      if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
+      {
+        throw LogicError(
+          VULKAN_HPP_NAMESPACE_STRING
+          "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
+      }
+      if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
+      {
+        throw LogicError(
+          VULKAN_HPP_NAMESPACE_STRING
+          "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext =
pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & + setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & + setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT + { + pSwapchains = pSwapchains_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setSwapchains( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setImageIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT + { + pResults = pResults_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setResults( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentInfoKHR const & ) const = default; +#else + bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( 
swapchainCount == rhs.swapchainCount ) && + ( pSwapchains == rhs.pSwapchains ) && ( pImageIndices == rhs.pImageIndices ) && + ( pResults == rhs.pResults ); +# endif + } + + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PresentInfoKHR; + }; + + struct RectLayerKHR + { + using NativeType = VkRectLayerKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, + uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , extent( extent_ ) + , layer( layer_ ) + {} + + VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RectLayerKHR( *reinterpret_cast( &rhs ) ) + {} + + explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) + : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & + setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & + setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT + { + layer = layer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, extent, layer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RectLayerKHR const & ) const = default; +#else + bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); +# 
endif + } + + bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + uint32_t layer = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RectLayerKHR is not nothrow_move_constructible!" ); + + struct PresentRegionKHR + { + using NativeType = VkPresentRegionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PresentRegionKHR( uint32_t rectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangleCount( rectangleCount_ ) + , pRectangles( pRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentRegionKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = rectangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & + setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pRectangles = pRectangles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR & setRectangles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + VULKAN_HPP_NOEXCEPT + { + rectangleCount = static_cast( rectangles_.size() ); + pRectangles = rectangles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( rectangleCount, pRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionKHR const & ) const = default; +#else + bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles ); +# endif + } + + bool operator!=( PresentRegionKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t rectangleCount = {}; + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentRegionKHR is not nothrow_move_constructible!" ); + + struct PresentRegionsKHR + { + using NativeType = VkPresentRegionsKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PresentRegionsKHR( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentRegionsKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & + setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR & setRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionsKHR const & ) const = default; +#else + bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentRegionsKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PresentRegionsKHR; + }; + + struct PresentTimeGOOGLE + { + using NativeType = VkPresentTimeGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ) + , desiredPresentTime( desiredPresentTime_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentTimeGOOGLE( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & + setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT + { + desiredPresentTime = desiredPresentTime_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( presentID, desiredPresentTime ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimeGOOGLE const & ) const = default; +#else + bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ); +# endif + } + + bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentTimeGOOGLE is not nothrow_move_constructible!" ); + + struct PresentTimesInfoGOOGLE + { + using NativeType = VkPresentTimesInfoGOOGLE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchainCount( swapchainCount_ ) + , pTimes( pTimes_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentTimesInfoGOOGLE( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) + : swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & + setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT + { + pTimes = pTimes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE & setTimes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) + VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( times_.size() ); + pTimes = times_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pTimes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; +#else + bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && + ( pTimes == rhs.pTimes ); +# endif + } + + bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == + sizeof( VkPresentTimesInfoGOOGLE ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PresentTimesInfoGOOGLE; + }; + + struct PrivateDataSlotCreateInfo + { + using NativeType = VkPrivateDataSlotCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PrivateDataSlotCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default; +#else + bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == + sizeof( VkPrivateDataSlotCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = PrivateDataSlotCreateInfo; + }; + using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; + + struct ProtectedSubmitInfo + { + using NativeType = VkProtectedSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT + : protectedSubmit( protectedSubmit_ ) + {} + + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ProtectedSubmitInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & + setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT + { + protectedSubmit = protectedSubmit_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedSubmit ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ProtectedSubmitInfo const & ) const = default; +#else + bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit ); +# endif + } + + bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ProtectedSubmitInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ProtectedSubmitInfo; + }; + + struct QueryPoolCreateInfo + { + using NativeType = VkQueryPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , queryType( queryType_ ) + , queryCount( queryCount_ ) + , pipelineStatistics( pipelineStatistics_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + { + queryType = queryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolCreateInfo const & ) const = default; +#else + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); +# endif + } + + bool operator!=( 
QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryPoolCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = QueryPoolCreateInfo; + }; + + struct QueryPoolPerformanceCreateInfoKHR + { + using NativeType = VkQueryPoolPerformanceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueryPoolPerformanceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t * pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( counterIndexCount_ ) + , pCounterIndices( pCounterIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR + QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR( + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) + : queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( static_cast( counterIndices_.size() ) ) + , pCounterIndices( counterIndices_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolPerformanceCreateInfoKHR & + operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & + setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & + setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = counterIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & + setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + { + pCounterIndices = pCounterIndices_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR & setCounterIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = static_cast( counterIndices_.size() ); + pCounterIndices = counterIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; +#else + bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices ); +# endif + } + + bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t * pCounterIndices = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == + sizeof( VkQueryPoolPerformanceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = QueryPoolPerformanceCreateInfoKHR; + }; + + struct QueryPoolPerformanceQueryCreateInfoINTEL + { + using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT + : performanceCountersSampling( performanceCountersSampling_ ) + {} + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) + VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceQueryCreateInfoINTEL( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL & + operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + { + performanceCountersSampling = performanceCountersSampling_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, performanceCountersSampling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; +#else + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( performanceCountersSampling == rhs.performanceCountersSampling ); +# endif + } + + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == + sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = QueryPoolPerformanceQueryCreateInfoINTEL; + }; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + + struct QueueFamilyCheckpointProperties2NV + { + using NativeType = VkQueueFamilyCheckpointProperties2NV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyCheckpointProperties2NV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT + : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyCheckpointProperties2NV & + operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV & + operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, checkpointExecutionStageMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default; +#else + bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); +# endif + } + + bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == + sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = QueueFamilyCheckpointProperties2NV; + }; + + struct QueueFamilyCheckpointPropertiesNV + { + using NativeType = VkQueueFamilyCheckpointPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyCheckpointPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT + : checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + {} + + VULKAN_HPP_CONSTEXPR + QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyCheckpointPropertiesNV & + operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, checkpointExecutionStageMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default; +#else + bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); +# endif + } + + bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == + sizeof( VkQueueFamilyCheckpointPropertiesNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = QueueFamilyCheckpointPropertiesNV; + }; + + struct QueueFamilyGlobalPriorityPropertiesKHR + { + using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( + uint32_t priorityCount_ = {}, + std::array const & + priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } } ) VULKAN_HPP_NOEXCEPT + : priorityCount( priorityCount_ ) + , priorities( priorities_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyGlobalPriorityPropertiesKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyGlobalPriorityPropertiesKHR & + operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyGlobalPriorityPropertiesKHR & + operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & + setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT + { + priorityCount = priorityCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorities( + std::array priorities_ ) + VULKAN_HPP_NOEXCEPT + { + priorities = priorities_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, priorityCount, priorities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & ) const = default; 
+#else + bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && + ( priorities == rhs.priorities ); +# endif + } + + bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; + void * pNext = {}; + uint32_t priorityCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D + priorities = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == + sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = QueueFamilyGlobalPriorityPropertiesKHR; + }; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; + + struct QueueFamilyProperties + { + using NativeType = VkQueueFamilyProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, + uint32_t queueCount_ = {}, + uint32_t timestampValidBits_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFlags( queueFlags_ ) + , queueCount( queueCount_ ) + , timestampValidBits( timestampValidBits_ ) + , minImageTransferGranularity( minImageTransferGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties const & ) const = default; +#else + bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && + ( timestampValidBits == rhs.timestampValidBits ) && + ( minImageTransferGranularity == 
rhs.minImageTransferGranularity ); +# endif + } + + bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyProperties is not nothrow_move_constructible!" ); + + struct QueueFamilyProperties2 + { + using NativeType = VkQueueFamilyProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( + VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyProperties( queueFamilyProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueFamilyProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties2 const & ) const = default; +#else + bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties ); +# endif + } + + bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == + sizeof( VkQueueFamilyProperties2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyProperties2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = QueueFamilyProperties2; + }; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct QueueFamilyQueryResultStatusProperties2KHR + { + using NativeType = VkQueueFamilyQueryResultStatusProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eQueueFamilyQueryResultStatusProperties2KHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + QueueFamilyQueryResultStatusProperties2KHR( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT + : supported( supported_ ) + {} + + VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusProperties2KHR( + QueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyQueryResultStatusProperties2KHR( VkQueueFamilyQueryResultStatusProperties2KHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : QueueFamilyQueryResultStatusProperties2KHR( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyQueryResultStatusProperties2KHR & + operator=( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyQueryResultStatusProperties2KHR & + operator=( VkQueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR & + setSupported( VULKAN_HPP_NAMESPACE::Bool32 supported_ ) VULKAN_HPP_NOEXCEPT + { + supported = supported_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkQueueFamilyQueryResultStatusProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkQueueFamilyQueryResultStatusProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supported ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyQueryResultStatusProperties2KHR const & ) const = default; +# else + bool operator==( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); +# endif + } + + bool operator!=( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR ) == + sizeof( VkQueueFamilyQueryResultStatusProperties2KHR ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "QueueFamilyQueryResultStatusProperties2KHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = QueueFamilyQueryResultStatusProperties2KHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct RayTracingShaderGroupCreateInfoKHR + { + using NativeType = VkRayTracingShaderGroupCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingShaderGroupCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {}, + const void * pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoKHR & + operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR & + operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + type, + generalShader, + closestHitShader, + anyHitShader, + intersectionShader, + pShaderGroupCaptureReplayHandle ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + const void * pShaderGroupCaptureReplayHandle = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == + sizeof( VkRayTracingShaderGroupCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoKHR; + }; + + struct RayTracingPipelineInterfaceCreateInfoKHR + { + using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, + uint32_t maxPipelineRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ) + , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ ) + {} + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( + RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : RayTracingPipelineInterfaceCreateInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingPipelineInterfaceCreateInfoKHR & + operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR & + operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && + ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); +# endif + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + 
public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPipelineRayPayloadSize = {}; + uint32_t maxPipelineRayHitAttributeSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == + sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineInterfaceCreateInfoKHR; + }; + + struct RayTracingPipelineCreateInfoKHR + { + using NativeType = VkRayTracingPipelineCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( 
stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoKHR & + operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & + setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInterface( + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = pLibraryInterface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState( + const 
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + groupCount, + pGroups, + maxPipelineRayRecursionDepth, + pLibraryInfo, + pLibraryInterface, + pDynamicState, + layout, + basePipelineHandle, + basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && + ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxPipelineRayRecursionDepth = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == + sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper 
have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoKHR; + }; + + struct RayTracingShaderGroupCreateInfoNV + { + using NativeType = VkRayTracingShaderGroupCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRayTracingShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = {}, + uint32_t closestHitShader_ = {}, + uint32_t anyHitShader_ = {}, + uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoNV & + operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & + setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && + ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) && + ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = {}; + uint32_t closestHitShader = {}; + uint32_t anyHitShader = {}; + uint32_t intersectionShader = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == + sizeof( VkRayTracingShaderGroupCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + struct RayTracingPipelineCreateInfoNV + { + using NativeType = VkRayTracingPipelineCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR + RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const 
VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {} ) + : flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoNV & + operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & setStages( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & + setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + 
setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + groupCount, + pGroups, + maxRecursionDepth, + layout, + basePipelineHandle, + basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) && + ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == + sizeof( VkRayTracingPipelineCreateInfoNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
+  {
+    using Type = RayTracingPipelineCreateInfoNV;
+  };
+
+  struct RefreshCycleDurationGOOGLE
+  {
+    using NativeType = VkRefreshCycleDurationGOOGLE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : refreshDuration( refreshDuration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
+    }
+
+    explicit operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( refreshDuration );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
+#else
+    bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( refreshDuration == rhs.refreshDuration );
+# endif
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint64_t refreshDuration = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) ==
+                              sizeof( VkRefreshCycleDurationGOOGLE ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value,
+                            "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
+
+  struct RenderPassAttachmentBeginInfo
+  {
+    using NativeType = VkRenderPassAttachmentBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      RenderPassAttachmentBeginInfo( uint32_t                                attachmentCount_ = {},
+                                     const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+      : attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
+    {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassAttachmentBeginInfo(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
+      : attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
+    {}
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassAttachmentBeginInfo &
+      operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &
+      setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &
+      setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassAttachmentBeginInfo & setAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = static_cast<uint32_t>( attachments_.size() );
+      pAttachments    = attachments_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>( this );
+    }
+
+    explicit operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassAttachmentBeginInfo *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::ImageView * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentCount, pAttachments );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
+#else
+    bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) &&
+             ( pAttachments == rhs.pAttachments );
+# endif
+    }
+
+    bool operator!=(
RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == + sizeof( VkRenderPassAttachmentBeginInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RenderPassAttachmentBeginInfo; + }; + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + + struct RenderPassBeginInfo + { + using NativeType = VkRenderPassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t clearValueCount_ = {}, + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT + : renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( clearValueCount_ ) + , pClearValues( pClearValues_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo( + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) + : renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( static_cast( clearValues_.size() ) ) + , pClearValues( clearValues_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & + setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & + setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT + { + clearValueCount = clearValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & + setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT + { + pClearValues = pClearValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo & setClearValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) + VULKAN_HPP_NOEXCEPT + { + clearValueCount = static_cast( clearValues_.size() ); + pClearValues = clearValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassBeginInfo const & ) const = default; +#else + bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && + ( framebuffer == rhs.framebuffer ) && ( renderArea == rhs.renderArea ) && + ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues ); +# endif + } + + bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t clearValueCount = {}; + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassBeginInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = RenderPassBeginInfo; + }; + + struct SubpassDescription + { + using NativeType = VkSubpassDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescription( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription & operator=( VkSubpassDescription const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setPreserveAttachments( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( flags, + pipelineBindPoint, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription const & ) const = default; +#else + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); +# endif + } + + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDescription is not nothrow_move_constructible!" 
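
For context, SubpassDescription is laid out to match VkSubpassDescription exactly, so it can be filled either through the raw count/pointer pairs or, in enhanced mode, through the ArrayProxyNoTemporaries constructor that derives the counts automatically. A minimal sketch, assuming `vk` is an alias for VULKAN_HPP_NAMESPACE; the attachment reference is illustrative and not part of this patch:

    // Both declarations below describe the same single-color-attachment subpass.
    vk::AttachmentReference colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal };

    // Raw count/pointer form, mirroring the C struct field for field.
    vk::SubpassDescription subpass{ {}, vk::PipelineBindPoint::eGraphics,
                                    0, nullptr,      // input attachments
                                    1, &colorRef };  // color attachments

    // ArrayProxy form: the color attachment count is taken from the proxy.
    vk::SubpassDescription subpassProxy{ {}, vk::PipelineBindPoint::eGraphics,
                                         /*inputAttachments*/ {}, colorRef };
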
); + + struct SubpassDependency + { + using NativeType = VkSubpassDependency; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubpassDependency( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDependency( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & + setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency const & ) const = default; +#else + bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return 
this->reflect() == rhs.reflect(); +# else + return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && + ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && + ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( dependencyFlags == rhs.dependencyFlags ); +# endif + } + + bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDependency is not nothrow_move_constructible!" ); + + struct RenderPassCreateInfo + { + using NativeType = VkRenderPassCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo( + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ = {} ) + : flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & + setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setSubpasses( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) + VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & + setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setDependencies( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo const & ) const = default; +#else + bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( attachmentCount == 
rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ); +# endif + } + + bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RenderPassCreateInfo; + }; + + struct SubpassDescription2 + { + using NativeType = VkSubpassDescription2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription2( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescription2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2( + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const & + resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + 
pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPResolveAttachments( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setPreserveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + pipelineBindPoint, + viewMask, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription2 const & ) const = default; +#else + bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( viewMask == rhs.viewMask ) && + ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == 
rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); +# endif + } + + bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDescription2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SubpassDescription2; + }; + using SubpassDescription2KHR = SubpassDescription2; + + struct SubpassDependency2 + { + using NativeType = VkSubpassDependency2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + , viewOffset( viewOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDependency2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( 
uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & + setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & + setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & + setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & + setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & + setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + { + viewOffset = viewOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + srcSubpass, + dstSubpass, + srcStageMask, + dstStageMask, + srcAccessMask, + dstAccessMask, + dependencyFlags, + viewOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency2 const & ) const = default; +#else + bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && + ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( viewOffset == rhs.viewOffset ); +# endif + } + + bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void * pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassDependency2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SubpassDependency2; + }; + using SubpassDependency2KHR = SubpassDependency2; + + struct RenderPassCreateInfo2 + { + using NativeType = VkRenderPassCreateInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t * pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + , correlatedViewMaskCount( correlatedViewMaskCount_ ) + , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2( + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {} ) + : flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + , correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ) + , pCorrelatedViewMasks( correlatedViewMasks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = 
attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setSubpasses( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setDependencies( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & + setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setCorrelatedViewMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); + pCorrelatedViewMasks = correlatedViewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + attachmentCount, + pAttachments, + subpassCount, + pSubpasses, + dependencyCount, + pDependencies, + correlatedViewMaskCount, + pCorrelatedViewMasks ); + } 
+#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo2 const & ) const = default; +#else + bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && + ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && + ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); +# endif + } + + bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t * pCorrelatedViewMasks = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreateInfo2 is not nothrow_move_constructible!" 
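
Together with AttachmentDescription2, these *2 structures back vkCreateRenderPass2 (core in Vulkan 1.2, VK_KHR_create_renderpass2 otherwise). A hedged sketch of how the ArrayProxy constructors compose into a one-subpass render pass; `device` stands for a previously created vk::Device and the attachment setup is illustrative only:

    // Illustrative only: one color attachment, one subpass, one external dependency.
    vk::AttachmentDescription2 color{ {}, vk::Format::eB8G8R8A8Unorm, vk::SampleCountFlagBits::e1,
                                      vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
                                      vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
                                      vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR };
    vk::AttachmentReference2 colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal,
                                       vk::ImageAspectFlagBits::eColor };
    vk::SubpassDescription2 subpass{ {}, vk::PipelineBindPoint::eGraphics, /*viewMask*/ 0,
                                     /*inputAttachments*/ {}, /*colorAttachments*/ colorRef };
    vk::SubpassDependency2 dependency{ VK_SUBPASS_EXTERNAL, 0,
                                       vk::PipelineStageFlagBits::eColorAttachmentOutput,
                                       vk::PipelineStageFlagBits::eColorAttachmentOutput,
                                       {}, vk::AccessFlagBits::eColorAttachmentWrite };

    // The ArrayProxy constructor fills each count/pointer pair from the proxies.
    vk::RenderPassCreateInfo2 createInfo{ {}, color, subpass, dependency };
    vk::UniqueRenderPass renderPass = device.createRenderPass2Unique( createInfo );
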
); + + template <> + struct CppType + { + using Type = RenderPassCreateInfo2; + }; + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + + struct RenderPassFragmentDensityMapCreateInfoEXT + { + using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( + RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : RenderPassFragmentDensityMapCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassFragmentDensityMapCreateInfoEXT & + operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapCreateInfoEXT & + operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( + VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapAttachment = fragmentDensityMapAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); +# endif + } + + bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == 
+ sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RenderPassFragmentDensityMapCreateInfoEXT; + }; + + struct RenderPassInputAttachmentAspectCreateInfo + { + using NativeType = VkRenderPassInputAttachmentAspectCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderPassInputAttachmentAspectCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( + uint32_t aspectReferenceCount_ = {}, + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectReferenceCount( aspectReferenceCount_ ) + , pAspectReferences( pAspectReferences_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( + RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + VULKAN_HPP_NOEXCEPT + : RenderPassInputAttachmentAspectCreateInfo( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + aspectReferences_ ) + : aspectReferenceCount( static_cast( aspectReferences_.size() ) ) + , pAspectReferences( aspectReferences_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassInputAttachmentAspectCreateInfo & + operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassInputAttachmentAspectCreateInfo & + operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & + setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + pAspectReferences = pAspectReferences_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + aspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = static_cast( aspectReferences_.size() ); + pAspectReferences = aspectReferences_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; +#else + bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && + ( pAspectReferences == rhs.pAspectReferences ); +# endif + } + + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + const void * pNext = {}; + uint32_t aspectReferenceCount = {}; + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == + sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = RenderPassInputAttachmentAspectCreateInfo; + }; + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + struct RenderPassMultiviewCreateInfo + { + using NativeType = VkRenderPassMultiviewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, + const uint32_t * pViewMasks_ = {}, + uint32_t dependencyCount_ = {}, + const int32_t * pViewOffsets_ = {}, + uint32_t correlationMaskCount_ = {}, + const uint32_t * pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassCount( subpassCount_ ) + , pViewMasks( pViewMasks_ ) + , dependencyCount( dependencyCount_ ) + , pViewOffsets( pViewOffsets_ ) + , correlationMaskCount( correlationMaskCount_ ) + , pCorrelationMasks( pCorrelationMasks_ ) + {} + + VULKAN_HPP_CONSTEXPR + RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassMultiviewCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {} ) + : subpassCount( static_cast( viewMasks_.size() ) ) + , pViewMasks( viewMasks_.data() ) + , dependencyCount( static_cast( viewOffsets_.size() ) ) + , pViewOffsets( viewOffsets_.data() ) + , correlationMaskCount( static_cast( correlationMasks_.size() ) ) + , pCorrelationMasks( correlationMasks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassMultiviewCreateInfo & + operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pViewMasks = pViewMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( viewMasks_.size() ); + pViewMasks = viewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + setPViewOffsets( const int32_t * 
pViewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pViewOffsets = pViewOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( viewOffsets_.size() ); + pViewOffsets = viewOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & + setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setCorrelationMasks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = static_cast( correlationMasks_.size() ); + pCorrelationMasks = correlationMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + subpassCount, + pViewMasks, + dependencyCount, + pViewOffsets, + correlationMaskCount, + pCorrelationMasks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; +#else + bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && + ( pViewMasks == rhs.pViewMasks ) && ( dependencyCount == rhs.dependencyCount ) && + ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && + ( pCorrelationMasks == rhs.pCorrelationMasks ); +# endif + } + + bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + const void * pNext = {}; + uint32_t subpassCount = {}; + const uint32_t * pViewMasks = {}; + uint32_t dependencyCount = {}; + const int32_t * pViewOffsets = {}; + uint32_t correlationMaskCount = {}; + const uint32_t * pCorrelationMasks = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == + sizeof( VkRenderPassMultiviewCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" 
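
RenderPassMultiviewCreateInfo is not passed to any entry point directly; it extends RenderPassCreateInfo through the pNext chain. A brief sketch, assuming `createInfo` is a vk::RenderPassCreateInfo populated as in the earlier sketch (the masks are example values, not part of the patch):

    // Broadcast subpass 0 to views 0 and 1 of a layered framebuffer.
    const uint32_t viewMasks[]        = { 0b11u };
    const uint32_t correlationMasks[] = { 0b11u };
    vk::RenderPassMultiviewCreateInfo multiview{ viewMasks, /*viewOffsets*/ {}, correlationMasks };

    createInfo.setPNext( &multiview );  // createInfo: a vk::RenderPassCreateInfo built elsewhere
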
); + + template <> + struct CppType + { + using Type = RenderPassMultiviewCreateInfo; + }; + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + struct SubpassSampleLocationsEXT + { + using NativeType = VkSubpassSampleLocationsEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( + uint32_t subpassIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassIndex( subpassIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT + { + subpassIndex = subpassIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo( + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subpassIndex, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassSampleLocationsEXT const & ) const = default; +#else + bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t subpassIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == + sizeof( VkSubpassSampleLocationsEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassSampleLocationsEXT is not nothrow_move_constructible!" 
); + + struct RenderPassSampleLocationsBeginInfoEXT + { + using NativeType = VkRenderPassSampleLocationsBeginInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderPassSampleLocationsBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( + uint32_t attachmentInitialSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, + uint32_t postSubpassSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) + , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) + , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) + , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSampleLocationsBeginInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentInitialSampleLocations_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + postSubpassSampleLocations_ = {} ) + : attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ) + , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ) + , postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ) + , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassSampleLocationsBeginInfoEXT & + operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSampleLocationsBeginInfoEXT & + operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) + VULKAN_HPP_NOEXCEPT + { + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = static_cast( 
attachmentInitialSampleLocations_.size() ); + pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); + pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + attachmentInitialSampleLocationsCount, + pAttachmentInitialSampleLocations, + postSubpassSampleLocationsCount, + pPostSubpassSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; +#else + bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) && + ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && + ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && + ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); +# endif + } + + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + const void * pNext = {}; + uint32_t attachmentInitialSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; + uint32_t postSubpassSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == + sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RenderPassSampleLocationsBeginInfoEXT; + }; + + struct RenderPassTransformBeginInfoQCOM + { + using NativeType = VkRenderPassTransformBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT : transform( transform_ ) + {} + + VULKAN_HPP_CONSTEXPR + RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassTransformBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassTransformBeginInfoQCOM & + operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; +#else + bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); +# endif + } + + bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == + sizeof( VkRenderPassTransformBeginInfoQCOM ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RenderPassTransformBeginInfoQCOM; + }; + + struct RenderingAttachmentInfo + { + using NativeType = VkRenderingAttachmentInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ) + , imageLayout( imageLayout_ ) + , resolveMode( resolveMode_ ) + , resolveImageView( resolveImageView_ ) + , resolveImageLayout( resolveImageLayout_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , clearValue( clearValue_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAttachmentInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT + { + resolveMode = resolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT + { + resolveImageView = resolveImageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + resolveImageLayout = resolveImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setLoadOp( 
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & + setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + { + clearValue = clearValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + imageView, + imageLayout, + resolveMode, + resolveImageView, + resolveImageLayout, + loadOp, + storeOp, + clearValue ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == + sizeof( VkRenderingAttachmentInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAttachmentInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = RenderingAttachmentInfo; + }; + using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; + + struct RenderingFragmentDensityMapAttachmentInfoEXT + { + using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) + VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ) + , imageLayout( imageLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( + RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : RenderingFragmentDensityMapAttachmentInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderingFragmentDensityMapAttachmentInfoEXT & + operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingFragmentDensityMapAttachmentInfoEXT & + operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & + setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & + setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, imageLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default; +#else + bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && + ( imageLayout == rhs.imageLayout ); +# endif + } + + bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == + sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = RenderingFragmentDensityMapAttachmentInfoEXT; + }; + + struct RenderingFragmentShadingRateAttachmentInfoKHR + { + using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR( + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ) + , imageLayout( imageLayout_ ) + , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR( + RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : RenderingFragmentShadingRateAttachmentInfoKHR( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderingFragmentShadingRateAttachmentInfoKHR & + operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingFragmentShadingRateAttachmentInfoKHR & + operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & + setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & + setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( + VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default; +#else + bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && + ( imageLayout == rhs.imageLayout ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); +# endif + } + + bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == + sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = RenderingFragmentShadingRateAttachmentInfoKHR; + }; + + struct RenderingInfo + { + using NativeType = VkRenderingInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInfo( + VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , renderArea( renderArea_ ) + , layerCount( layerCount_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pDepthAttachment( pDepthAttachment_ ) + , pStencilAttachment( pStencilAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInfo( + VULKAN_HPP_NAMESPACE::RenderingFlags flags_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + uint32_t layerCount_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {} ) + : flags( flags_ ) + , renderArea( renderArea_ ) + , layerCount( layerCount_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pDepthAttachment( pDepthAttachment_ ) + , pStencilAttachment( pStencilAttachment_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & + setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & 
setPColorAttachments( + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInfo & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & + setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthAttachment = pDepthAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment( + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pStencilAttachment = pStencilAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + renderArea, + layerCount, + viewMask, + colorAttachmentCount, + pColorAttachments, + pDepthAttachment, + pStencilAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingInfo const & ) const = default; +#else + bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( renderArea == rhs.renderArea ) && ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment ); +# endif + } + + bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t layerCount = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = RenderingInfo; + }; + using RenderingInfoKHR = RenderingInfo; + + struct ResolveImageInfo2 + { + using NativeType = VkResolveImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ResolveImageInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ResolveImageInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2( + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + : srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & + setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & + setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & + setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2 & setRegions( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) + VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ResolveImageInfo2 const & ) const = default; +#else + bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && + ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ResolveImageInfo2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = ResolveImageInfo2; + }; + using ResolveImageInfo2KHR = ResolveImageInfo2; + + struct SamplerBorderColorComponentMappingCreateInfoEXT + { + using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {} ) VULKAN_HPP_NOEXCEPT + : components( components_ ) + , srgb( srgb_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( + SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : SamplerBorderColorComponentMappingCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerBorderColorComponentMappingCreateInfoEXT & + operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerBorderColorComponentMappingCreateInfoEXT & + operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & + setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT + { + srgb = srgb_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, components, srgb ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default; +#else + bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && + ( srgb == rhs.srgb ); +# endif + } + + bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; + const 
void * pNext = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::Bool32 srgb = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) == + sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SamplerBorderColorComponentMappingCreateInfoEXT; + }; + + struct SamplerCreateInfo + { + using NativeType = VkSamplerCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + float mipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, + float maxAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + float minLod_ = {}, + float maxLod_ = {}, + VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , magFilter( magFilter_ ) + , minFilter( minFilter_ ) + , mipmapMode( mipmapMode_ ) + , addressModeU( addressModeU_ ) + , addressModeV( addressModeV_ ) + , addressModeW( addressModeW_ ) + , mipLodBias( mipLodBias_ ) + , anisotropyEnable( anisotropyEnable_ ) + , maxAnisotropy( maxAnisotropy_ ) + , compareEnable( compareEnable_ ) + , compareOp( compareOp_ ) + , minLod( minLod_ ) + , maxLod( maxLod_ ) + , borderColor( borderColor_ ) + , unnormalizedCoordinates( unnormalizedCoordinates_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) 
VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT + { + magFilter = magFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT + { + minFilter = minFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT + { + mipmapMode = mipmapMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT + { + addressModeU = addressModeU_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT + { + maxLod = maxLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT + { + borderColor = borderColor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & + setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + magFilter, + minFilter, + mipmapMode, + addressModeU, + addressModeV, + addressModeW, + mipLodBias, + anisotropyEnable, + maxAnisotropy, + compareEnable, + compareOp, + minLod, + maxLod, + borderColor, + unnormalizedCoordinates ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCreateInfo const & ) const = default; +#else + bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) && + ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && + ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && + ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) && + ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && + ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && + ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); +# endif + } + + bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + float mipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SamplerCreateInfo; + }; + + struct SamplerCustomBorderColorCreateInfoEXT + { + using NativeType = VkSamplerCustomBorderColorCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerCustomBorderColorCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT + : customBorderColor( customBorderColor_ ) + , format( format_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCustomBorderColorCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerCustomBorderColorCreateInfoEXT & + operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerCustomBorderColorCreateInfoEXT & + operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & + setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColor = customBorderColor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, customBorderColor, format ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == + sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SamplerCustomBorderColorCreateInfoEXT; + }; + + struct SamplerReductionModeCreateInfo + { + using NativeType = VkSamplerReductionModeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT + : reductionMode( reductionMode_ ) + {} + + VULKAN_HPP_CONSTEXPR + SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerReductionModeCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerReductionModeCreateInfo & + operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & + setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT + { + reductionMode = reductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, reductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default; +#else + bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode ); +# endif + } + + bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = + VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == + sizeof( VkSamplerReductionModeCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SamplerReductionModeCreateInfo; + }; + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; + + struct SamplerYcbcrConversionCreateInfo + { + using NativeType = VkSamplerYcbcrConversionCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , ycbcrModel( ycbcrModel_ ) + , ycbcrRange( ycbcrRange_ ) + , components( components_ ) + , xChromaOffset( xChromaOffset_ ) + , yChromaOffset( yChromaOffset_ ) + , chromaFilter( chromaFilter_ ) + , forceExplicitReconstruction( forceExplicitReconstruction_ ) + {} + + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerYcbcrConversionCreateInfo & + operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrModel = ycbcrModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrRange = ycbcrRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + xChromaOffset = xChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
SamplerYcbcrConversionCreateInfo & + setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + yChromaOffset = yChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT + { + chromaFilter = chromaFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + ycbcrModel, + ycbcrRange, + components, + xChromaOffset, + yChromaOffset, + chromaFilter, + forceExplicitReconstruction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && + ( ycbcrModel == rhs.ycbcrModel ) && ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && + ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset ) && + ( chromaFilter == rhs.chromaFilter ) && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); +# endif + } + + bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == + sizeof( VkSamplerYcbcrConversionCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionCreateInfo; + }; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + + struct SamplerYcbcrConversionImageFormatProperties + { + using NativeType = VkSamplerYcbcrConversionImageFormatProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSamplerYcbcrConversionImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) + {} + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( + SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) + VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionImageFormatProperties( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SamplerYcbcrConversionImageFormatProperties & + operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionImageFormatProperties & + operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, combinedImageSamplerDescriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); +# endif + } + + bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + void * pNext = {}; + uint32_t combinedImageSamplerDescriptorCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) == + sizeof( VkSamplerYcbcrConversionImageFormatProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
+  {
+    using Type = SamplerYcbcrConversionImageFormatProperties;
+  };
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
+  struct SamplerYcbcrConversionInfo
+  {
+    using NativeType = VkSamplerYcbcrConversionInfo;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : conversion( conversion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo &
+      setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conversion = conversion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
+    }
+
+    explicit operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, conversion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
+#  endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType          sType      = StructureType::eSamplerYcbcrConversionInfo;
+    const void *                                 pNext      = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) ==
+                              sizeof( VkSamplerYcbcrConversionInfo ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value,
+                            "SamplerYcbcrConversionInfo is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = SamplerYcbcrConversionInfo; + }; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ScreenSurfaceCreateInfoQNX + { + using NativeType = VkScreenSurfaceCreateInfoQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, + struct _screen_context * context_ = {}, + struct _screen_window * window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , context( context_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR + ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenSurfaceCreateInfoQNX( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & + setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & + setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT + { + context = context_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & + setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, context, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default; +# else + bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && + ( window == rhs.window ); +# endif + } + + bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; + struct _screen_context * context = {}; + struct _screen_window 
* window = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == + sizeof( VkScreenSurfaceCreateInfoQNX ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ScreenSurfaceCreateInfoQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + struct SemaphoreCreateInfo + { + using NativeType = VkSemaphoreCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreCreateInfo const & ) const = default; +#else + bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreCreateInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SemaphoreCreateInfo; + }; + + struct SemaphoreGetFdInfoKHR + { + using NativeType = VkSemaphoreGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetFdInfoKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default; +#else + bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SemaphoreGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SemaphoreGetWin32HandleInfoKHR + { + using NativeType = VkSemaphoreGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR + SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreGetWin32HandleInfoKHR & + operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == + sizeof( VkSemaphoreGetWin32HandleInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SemaphoreGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct SemaphoreGetZirconHandleInfoFUCHSIA + { + using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreGetZirconHandleInfoFUCHSIA & + operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetZirconHandleInfoFUCHSIA & + operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA 
const & ) const = default; +# else + bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == + sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SemaphoreGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct SemaphoreSignalInfo + { + using NativeType = VkSemaphoreSignalInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , value( value_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreSignalInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, value ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( SemaphoreSignalInfo const & ) const = default; +#else + bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( value == rhs.value ); +# endif + } + + bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreSignalInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SemaphoreSignalInfo; + }; + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + + struct SemaphoreSubmitInfo + { + using NativeType = VkSemaphoreSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, + uint32_t deviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , value( value_ ) + , stageMask( stageMask_ ) + , deviceIndex( deviceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & + setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + { + value = value_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & + setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT + { + stageMask = stageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndex = deviceIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSubmitInfo const & ) const = default; +#else + bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && + ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && ( deviceIndex == rhs.deviceIndex ); +# endif + } + + bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {}; + uint32_t deviceIndex = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreSubmitInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SemaphoreSubmitInfo; + }; + using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; + + struct SemaphoreTypeCreateInfo + { + using NativeType = VkSemaphoreTypeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, + uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphoreType( semaphoreType_ ) + , initialValue( initialValue_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreTypeCreateInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & + setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreType = semaphoreType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT + { + initialValue = initialValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphoreType, initialValue ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; +#else + bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && + ( initialValue == rhs.initialValue ); +# endif + } + + bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; + uint64_t initialValue = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == + sizeof( VkSemaphoreTypeCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SemaphoreTypeCreateInfo; + }; + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + + struct SemaphoreWaitInfo + { + using NativeType = VkSemaphoreWaitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, + uint32_t semaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, + const uint64_t * pValues_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , semaphoreCount( semaphoreCount_ ) + , pSemaphores( pSemaphores_ ) + , pValues( pValues_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreWaitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo( + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {} ) + : flags( flags_ ) + , semaphoreCount( static_cast( semaphores_.size() ) ) + , pSemaphores( semaphores_.data() ) + , pValues( values_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); +# else + if ( semaphores_.size() != values_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & + setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = semaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & + setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSemaphores = pSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & setSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) + VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( semaphores_.size() ); + pSemaphores = semaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & + setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + semaphoreCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreWaitInfo const & ) const = default; +#else + bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( semaphoreCount == rhs.semaphoreCount ) && ( pSemaphores == rhs.pSemaphores ) && + ( pValues == rhs.pValues ); +# endif + } + + bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; + const uint64_t * pValues = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), + "struct and wrapper have different size!" 
);
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value,
+                            "SemaphoreWaitInfo is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
+  {
+    using Type = SemaphoreWaitInfo;
+  };
+  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
+
+  struct SetStateFlagsIndirectCommandNV
+  {
+    using NativeType = VkSetStateFlagsIndirectCommandNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {}
+
+    VULKAN_HPP_CONSTEXPR
+      SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SetStateFlagsIndirectCommandNV &
+      operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV *>( this );
+    }
+
+    explicit operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( data );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( data == rhs.data );
+#  endif
+    }
+
+    bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t data = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) ==
+                              sizeof( VkSetStateFlagsIndirectCommandNV ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value,
+    "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!"
); + + struct ShaderModuleCreateInfo + { + using NativeType = VkShaderModuleCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t * pCode_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleCreateInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) + : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo & + setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * 4; + pCode = code_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, codeSize, pCode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleCreateInfo const & ) const = default; +#else + bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); +# endif + } + + bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } 
+#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t * pCode = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == + sizeof( VkShaderModuleCreateInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderModuleCreateInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ShaderModuleCreateInfo; + }; + + struct ShaderModuleValidationCacheCreateInfoEXT + { + using NativeType = VkShaderModuleValidationCacheCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eShaderModuleValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT + : validationCache( validationCache_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( + ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : ShaderModuleValidationCacheCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderModuleValidationCacheCreateInfoEXT & + operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT & + operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + { + validationCache = validationCache_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, validationCache ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); +# endif + } + + bool operator!=( 
ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == + sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ShaderModuleValidationCacheCreateInfoEXT; + }; + + struct ShaderResourceUsageAMD + { + using NativeType = VkShaderResourceUsageAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, + uint32_t ldsSizePerLocalWorkGroup_ = {}, + size_t ldsUsageSizeInBytes_ = {}, + size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : numUsedVgprs( numUsedVgprs_ ) + , numUsedSgprs( numUsedSgprs_ ) + , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) + , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ) + , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; +#else + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && + ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && + ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); +# endif + } + + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; + uint32_t ldsSizePerLocalWorkGroup = {}; + size_t ldsUsageSizeInBytes = {}; + size_t 
scratchMemUsageInBytes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == + sizeof( VkShaderResourceUsageAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderResourceUsageAMD is not nothrow_move_constructible!" ); + + struct ShaderStatisticsInfoAMD + { + using NativeType = VkShaderStatisticsInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, + uint32_t numPhysicalVgprs_ = {}, + uint32_t numPhysicalSgprs_ = {}, + uint32_t numAvailableVgprs_ = {}, + uint32_t numAvailableSgprs_ = {}, + std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderStageMask( shaderStageMask_ ) + , resourceUsage( resourceUsage_ ) + , numPhysicalVgprs( numPhysicalVgprs_ ) + , numPhysicalSgprs( numPhysicalSgprs_ ) + , numAvailableVgprs( numAvailableVgprs_ ) + , numAvailableSgprs( numAvailableSgprs_ ) + , computeWorkGroupSize( computeWorkGroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderStatisticsInfoAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shaderStageMask, + resourceUsage, + numPhysicalVgprs, + numPhysicalSgprs, + numAvailableVgprs, + numAvailableSgprs, + computeWorkGroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; +#else + bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && + ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && + ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) && + ( computeWorkGroupSize == rhs.computeWorkGroupSize ); +# endif + } + + bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; + uint32_t numPhysicalVgprs = {}; + uint32_t numPhysicalSgprs = {}; + uint32_t numAvailableVgprs = {}; + uint32_t 
numAvailableSgprs = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == + sizeof( VkShaderStatisticsInfoAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" ); + + struct SharedPresentSurfaceCapabilitiesKHR + { + using NativeType = VkSharedPresentSurfaceCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSharedPresentSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SharedPresentSurfaceCapabilitiesKHR & + operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR & + operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sharedPresentSupportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); +# endif + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == + sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SharedPresentSurfaceCapabilitiesKHR; + }; + + struct SparseImageFormatProperties + { + using NativeType = VkSparseImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , imageGranularity( imageGranularity_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, imageGranularity, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; +#else + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == + sizeof( VkSparseImageFormatProperties ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SparseImageFormatProperties is not nothrow_move_constructible!" 
 );
+
+  struct SparseImageFormatProperties2
+  {
+    using NativeType = VkSparseImageFormatProperties2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(
+      VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR
+      SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
+    }
+
+    explicit operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, properties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
+#else
+    bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
+#  endif
+    }
+
+    bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType               sType      = StructureType::eSparseImageFormatProperties2;
+    void *                                            pNext      = {};
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) ==
+                              sizeof( VkSparseImageFormatProperties2 ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
+    "SparseImageFormatProperties2 is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = SparseImageFormatProperties2; + }; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct SparseImageMemoryRequirements + { + using NativeType = VkSparseImageMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ) + , imageMipTailFirstLod( imageMipTailFirstLod_ ) + , imageMipTailSize( imageMipTailSize_ ) + , imageMipTailOffset( imageMipTailOffset_ ) + , imageMipTailStride( imageMipTailStride_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryRequirements & + operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && + ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) && + ( imageMipTailStride == rhs.imageMipTailStride ); +# endif + } + + bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == + sizeof( VkSparseImageMemoryRequirements ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SparseImageMemoryRequirements is not nothrow_move_constructible!" ); + + struct SparseImageMemoryRequirements2 + { + using NativeType = VkSparseImageMemoryRequirements2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryRequirements( memoryRequirements_ ) + {} + + VULKAN_HPP_CONSTEXPR + SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements2( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryRequirements2 & + operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); +# endif + } + + bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == + sizeof( VkSparseImageMemoryRequirements2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SparseImageMemoryRequirements2; + }; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + +#if defined( VK_USE_PLATFORM_GGP ) + struct StreamDescriptorSurfaceCreateInfoGGP + { + using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , streamDescriptor( streamDescriptor_ ) + {} + + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + StreamDescriptorSurfaceCreateInfoGGP & + operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP & + operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + streamDescriptor = streamDescriptor_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, streamDescriptor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); + } + + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == + sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = StreamDescriptorSurfaceCreateInfoGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct StridedDeviceAddressRegionKHR + { + using NativeType = VkStridedDeviceAddressRegionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + , stride( stride_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR + StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressRegionKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + StridedDeviceAddressRegionKHR & + operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & + setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & + setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & + setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( deviceAddress, stride, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; +#else + bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size ); +# endif + } + + bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == + sizeof( VkStridedDeviceAddressRegionKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" ); + + struct SubmitInfo + { + using NativeType = VkSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , pWaitDstStageMask( pWaitDstStageMask_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBuffers( pCommandBuffers_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitDstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBuffers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphores_ = {} ) + : waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , pWaitDstStageMask( waitDstStageMask_.data() ) + , commandBufferCount( static_cast( commandBuffers_.size() ) ) + , pCommandBuffers( commandBuffers_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +# else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SubmitInfo::SubmitInfo: 
waitSemaphores_.size() != waitDstStageMask_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & + setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & + setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitDstStageMask( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & + setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setCommandBuffers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) + VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & + setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setSignalSemaphores( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) + VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubmitInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphores, + pWaitDstStageMask, + commandBufferCount, + pCommandBuffers, + signalSemaphoreCount, + pSignalSemaphores ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo const & ) const = default; +#else + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && + ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBuffers == rhs.pCommandBuffers ) && + ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores ); +# endif + } + + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubmitInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SubmitInfo; + }; + + struct SubmitInfo2 + { + using NativeType = VkSubmitInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, + uint32_t waitSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, + uint32_t commandBufferInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, + uint32_t signalSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ) + , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ) + , commandBufferInfoCount( commandBufferInfoCount_ ) + , pCommandBufferInfos( pCommandBufferInfos_ ) + , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ) + , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SubmitInfo2( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitSemaphoreInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphoreInfos_ = {} ) + : flags( flags_ ) + , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) + , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) + , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) + , pCommandBufferInfos( commandBufferInfos_.data() ) + , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) + , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = waitSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreInfos = pWaitSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setWaitSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); + 
pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = commandBufferInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos( + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferInfos = pCommandBufferInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setCommandBufferInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = static_cast( commandBufferInfos_.size() ); + pCommandBufferInfos = commandBufferInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = signalSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos( + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreInfos = pSignalSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setSignalSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); + pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + waitSemaphoreInfoCount, + pWaitSemaphoreInfos, + commandBufferInfoCount, + pCommandBufferInfos, + signalSemaphoreInfoCount, + pSignalSemaphoreInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo2 const & ) const = default; +#else + bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && + ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && + ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && + ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && + ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && + ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); +# endif + } + + bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubmitFlags flags = {}; + uint32_t waitSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * 
pWaitSemaphoreInfos = {}; + uint32_t commandBufferInfoCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {}; + uint32_t signalSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubmitInfo2 is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SubmitInfo2; + }; + using SubmitInfo2KHR = SubmitInfo2; + + struct SubpassBeginInfo + { + using NativeType = VkSubpassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = + VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT + : contents( contents_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassBeginInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & + setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT + { + contents = contents_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, contents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassBeginInfo const & ) const = default; +#else + bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents ); +# endif + } + + bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassBeginInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SubpassBeginInfo; + }; + using SubpassBeginInfoKHR = SubpassBeginInfo; + + struct SubpassDescriptionDepthStencilResolve + { + using NativeType = VkSubpassDescriptionDepthStencilResolve; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSubpassDescriptionDepthStencilResolve; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : depthResolveMode( depthResolveMode_ ) + , stencilResolveMode( stencilResolveMode_ ) + , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescriptionDepthStencilResolve( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDescriptionDepthStencilResolve & + operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve & + operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + depthResolveMode = depthResolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + stencilResolveMode = stencilResolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, 
pDepthStencilResolveAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; +#else + bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && + ( stencilResolveMode == rhs.stencilResolveMode ) && + ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); +# endif + } + + bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == + sizeof( VkSubpassDescriptionDepthStencilResolve ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SubpassDescriptionDepthStencilResolve; + }; + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + + struct SubpassEndInfo + { + using NativeType = VkSubpassEndInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassEndInfo( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassEndInfo const & ) const = default; +#else + bool operator==( 
SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; + const void * pNext = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubpassEndInfo is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SubpassEndInfo; + }; + using SubpassEndInfoKHR = SubpassEndInfo; + + struct SubpassFragmentDensityMapOffsetEndInfoQCOM + { + using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( + uint32_t fragmentDensityOffsetCount_ = {}, + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : fragmentDensityOffsetCount( fragmentDensityOffsetCount_ ) + , pFragmentDensityOffsets( pFragmentDensityOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( + SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) + VULKAN_HPP_NOEXCEPT + : SubpassFragmentDensityMapOffsetEndInfoQCOM( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassFragmentDensityMapOffsetEndInfoQCOM( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + fragmentDensityOffsets_ ) + : fragmentDensityOffsetCount( static_cast( fragmentDensityOffsets_.size() ) ) + , pFragmentDensityOffsets( fragmentDensityOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassFragmentDensityMapOffsetEndInfoQCOM & + operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassFragmentDensityMapOffsetEndInfoQCOM & + operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityOffsetCount = fragmentDensityOffsetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentDensityOffsets = pFragmentDensityOffsets_; + return *this; + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityOffsetCount = static_cast( fragmentDensityOffsets_.size() ); + pFragmentDensityOffsets = fragmentDensityOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassFragmentDensityMapOffsetEndInfoQCOM const & ) const = default; +#else + bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) && + ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets ); +# endif + } + + bool operator!=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; + const void * pNext = {}; + uint32_t fragmentDensityOffsetCount = {}; + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) == + sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM; + }; + + struct SubpassShadingPipelineCreateInfoHUAWEI + { + using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {} ) VULKAN_HPP_NOEXCEPT + : renderPass( renderPass_ ) + , subpass( subpass_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassShadingPipelineCreateInfoHUAWEI( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassShadingPipelineCreateInfoHUAWEI & + operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassShadingPipelineCreateInfoHUAWEI & + operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, subpass ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default; +#else + bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && + ( subpass == rhs.subpass ); +# endif + } + + bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == + sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = SubpassShadingPipelineCreateInfoHUAWEI; + }; + + struct SurfaceCapabilities2EXT + { + using NativeType = VkSurfaceCapabilities2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + , supportedSurfaceCounters( supportedSurfaceCounters_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags, + supportedSurfaceCounters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && + ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( 
maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && + ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); +# endif + } + + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == + sizeof( VkSurfaceCapabilities2EXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceCapabilities2EXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + struct SurfaceCapabilitiesKHR + { + using NativeType = VkSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs 
) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && + ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ); +# endif + } + + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == + sizeof( VkSurfaceCapabilitiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" 
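For orientation, a minimal usage sketch of the SurfaceCapabilitiesKHR wrapper defined above; it assumes the default vk namespace, exceptions and enhanced mode enabled, and physicalDevice/surface handles created elsewhere (the helper name chooseImageCount is illustrative only, not part of this patch):

#include <cstdint>
#include <vulkan/vulkan.hpp>

// Sketch: query the surface capabilities and pick a swapchain image count
// that stays within the limits the driver reports.
uint32_t chooseImageCount( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );

  uint32_t imageCount = caps.minImageCount + 1;
  if ( caps.maxImageCount != 0 && imageCount > caps.maxImageCount )  // maxImageCount == 0 means "no upper limit"
    imageCount = caps.maxImageCount;
  return imageCount;
}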
);
+
+  struct SurfaceCapabilities2KHR
+  {
+    using NativeType = VkSurfaceCapabilities2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(
+      VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
+      : surfaceCapabilities( surfaceCapabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2KHR *>( this );
+    }
+
+    explicit operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2KHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surfaceCapabilities );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities );
+# endif
+    }
+
+    bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::eSurfaceCapabilities2KHR;
+    void *                                       pNext               = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) ==
+                              sizeof( VkSurfaceCapabilities2KHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value,
+                            "SurfaceCapabilities2KHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = SurfaceCapabilities2KHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceCapabilitiesFullScreenExclusiveEXT + { + using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT + : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( + SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesFullScreenExclusiveEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilitiesFullScreenExclusiveEXT & + operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesFullScreenExclusiveEXT & + operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & + setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT + { + fullScreenExclusiveSupported = fullScreenExclusiveSupported_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fullScreenExclusiveSupported ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; +# else + bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); +# endif + } + + bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == + sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), + "struct 
and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SurfaceFormatKHR + { + using NativeType = VkSurfaceFormatKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = + VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , colorSpace( colorSpace_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFormatKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( format, colorSpace ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFormatKHR const & ) const = default; +#else + bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace ); +# endif + } + + bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceFormatKHR is not nothrow_move_constructible!" 
);
+
+  struct SurfaceFormat2KHR
+  {
+    using NativeType = VkSurfaceFormat2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+      : surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
+    {}
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
+    }
+
+    explicit operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
+    }
+
+#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surfaceFormat );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
+# endif
+    }
+
+    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType         = StructureType::eSurfaceFormat2KHR;
+    void *                                 pNext         = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value,
+                            "SurfaceFormat2KHR is not nothrow_move_constructible!"
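Likewise, a hedged sketch of how the SurfaceFormatKHR/SurfaceFormat2KHR and SwapchainCreateInfoKHR wrappers in this hunk are typically used together; it assumes the default vk namespace, exceptions and enhanced mode enabled, and valid physicalDevice, device, surface, extent, and imageCount supplied by the caller (createSimpleSwapchain is an illustrative name, not part of this patch):

#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

// Sketch: pick a surface format reported for the surface and create a swapchain
// using the chained setters generated for SwapchainCreateInfoKHR.
vk::SwapchainKHR createSimpleSwapchain( vk::PhysicalDevice physicalDevice,
                                        vk::Device         device,
                                        vk::SurfaceKHR     surface,
                                        vk::Extent2D       extent,
                                        uint32_t           imageCount )
{
  // Prefer B8G8R8A8 sRGB if available, otherwise fall back to the first reported format.
  std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  vk::SurfaceFormatKHR              chosen  = formats.front();
  for ( vk::SurfaceFormatKHR const & f : formats )
    if ( f.format == vk::Format::eB8G8R8A8Srgb && f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
      chosen = f;

  vk::SwapchainCreateInfoKHR createInfo{};
  createInfo.setSurface( surface )
    .setMinImageCount( imageCount )
    .setImageFormat( chosen.format )
    .setImageColorSpace( chosen.colorSpace )
    .setImageExtent( extent )
    .setImageArrayLayers( 1 )
    .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
    .setImageSharingMode( vk::SharingMode::eExclusive )
    .setPreTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )  // simplification, see note below
    .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
    .setPresentMode( vk::PresentModeKHR::eFifo )
    .setClipped( VK_TRUE );

  return device.createSwapchainKHR( createInfo );
}

A real application would take preTransform from the queried capabilities' currentTransform and clamp the extent and image count to the reported limits; the sketch hard-codes them only to keep the example short.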
); + + template <> + struct CppType + { + using Type = SurfaceFormat2KHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveInfoEXT + { + using NativeType = VkSurfaceFullScreenExclusiveInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceFullScreenExclusiveInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT + : fullScreenExclusive( fullScreenExclusive_ ) + {} + + VULKAN_HPP_CONSTEXPR + SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceFullScreenExclusiveInfoEXT & + operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & + setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT + { + fullScreenExclusive = fullScreenExclusive_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fullScreenExclusive ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); +# endif + } + + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == + sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveInfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveWin32InfoEXT + { + using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT + : hmonitor( hmonitor_ ) + {} + + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveWin32InfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT & + operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & + setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT + { + hmonitor = hmonitor_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hmonitor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); +# endif + } + + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == + sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveWin32InfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SurfaceProtectedCapabilitiesKHR + { + using NativeType = VkSurfaceProtectedCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT + : supportsProtected( supportsProtected_ ) + {} + + VULKAN_HPP_CONSTEXPR + SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceProtectedCapabilitiesKHR & + operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & + setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT + { + supportsProtected = supportsProtected_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportsProtected ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); +# endif + } + + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == + sizeof( VkSurfaceProtectedCapabilitiesKHR ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SurfaceProtectedCapabilitiesKHR; + }; + + struct SwapchainCounterCreateInfoEXT + { + using NativeType = VkSwapchainCounterCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT + : surfaceCounters( surfaceCounters_ ) + {} + + VULKAN_HPP_CONSTEXPR + SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainCounterCreateInfoEXT & + operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + { + surfaceCounters = surfaceCounters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surfaceCounters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); +# endif + } + + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == + sizeof( VkSwapchainCounterCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SwapchainCounterCreateInfoEXT; + }; + + struct SwapchainCreateInfoKHR + { + using NativeType = VkSwapchainCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, + uint32_t minImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + uint32_t imageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + {} + + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR( + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, + uint32_t minImageCount_, + VULKAN_HPP_NAMESPACE::Format imageFormat_, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, + uint32_t imageArrayLayers_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + 
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) + : flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT + { + minImageCount = minImageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = imageFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT + { + imageColorSpace = imageColorSpace_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + { + imageUsage = imageUsage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + { + imageSharingMode = imageSharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR & setQueueFamilyIndices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + 
queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT + { + preTransform = preTransform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + { + compositeAlpha = compositeAlpha_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT + { + clipped = clipped_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & + setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + oldSwapchain = oldSwapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + surface, + minImageCount, + imageFormat, + imageColorSpace, + imageExtent, + imageArrayLayers, + imageUsage, + imageSharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + preTransform, + compositeAlpha, + presentMode, + clipped, + oldSwapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && + ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && + ( imageColorSpace == rhs.imageColorSpace ) && ( imageExtent == rhs.imageExtent ) && + ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && + ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && + ( compositeAlpha == rhs.compositeAlpha ) && ( presentMode == rhs.presentMode ) && + ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); +# endif + } + + bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = 
VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == + sizeof( VkSwapchainCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SwapchainCreateInfoKHR; + }; + + struct SwapchainDisplayNativeHdrCreateInfoAMD + { + using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) + VULKAN_HPP_NOEXCEPT : localDimmingEnable( localDimmingEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainDisplayNativeHdrCreateInfoAMD( + *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD & + operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + { + localDimmingEnable = localDimmingEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
sType, pNext, localDimmingEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; +#else + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); +# endif + } + + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == + sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = SwapchainDisplayNativeHdrCreateInfoAMD; + }; + + struct TextureLODGatherFormatPropertiesAMD + { + using NativeType = VkTextureLODGatherFormatPropertiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eTextureLodGatherFormatPropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT + : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) + {} + + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TextureLODGatherFormatPropertiesAMD & + operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TextureLODGatherFormatPropertiesAMD & + operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; +#else + bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); 
+# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); +# endif + } + + bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == + sizeof( VkTextureLODGatherFormatPropertiesAMD ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = TextureLODGatherFormatPropertiesAMD; + }; + + struct TimelineSemaphoreSubmitInfo + { + using NativeType = VkTimelineSemaphoreSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValueCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreValueCount( waitSemaphoreValueCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + {} + + VULKAN_HPP_CONSTEXPR + TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : TimelineSemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {} ) + : waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValueCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & + setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = waitSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & + setPWaitSemaphoreValues( const 
uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & + setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = signalSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & + setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreValueCount, + pWaitSemaphoreValues, + signalSemaphoreValueCount, + pSignalSemaphoreValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; +#else + bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && + ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); +# endif + } + + bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreValueCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValueCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == + sizeof( VkTimelineSemaphoreSubmitInfo ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = TimelineSemaphoreSubmitInfo; + }; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + + struct TraceRaysIndirectCommandKHR + { + using NativeType = VkTraceRaysIndirectCommandKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : width( width_ ) + , height( height_ ) + , depth( depth_ ) + {} + + VULKAN_HPP_CONSTEXPR + TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommandKHR( *reinterpret_cast( &rhs ) ) + {} + + explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( width, height, depth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; +#else + bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); +# endif + } + + bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == + sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" 
); + + struct ValidationCacheCreateInfoEXT + { + using NativeType = VkValidationCacheCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + {} + + VULKAN_HPP_CONSTEXPR + ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) + : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & + setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & + setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ValidationCacheCreateInfoEXT & + setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# 
if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData ); +# endif + } + + bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == + sizeof( VkValidationCacheCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ValidationCacheCreateInfoEXT; + }; + + struct ValidationFeaturesEXT + { + using NativeType = VkValidationFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( + uint32_t enabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, + uint32_t disabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : enabledValidationFeatureCount( enabledValidationFeatureCount_ ) + , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) + , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) + , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationFeaturesEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + enabledValidationFeatures_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationFeatures_ = {} ) + : enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ) + , pEnabledValidationFeatures( enabledValidationFeatures_.data() ) + , disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ) + , pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT 
& + setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = enabledValidationFeatureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPEnabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledValidationFeatures = pEnabledValidationFeatures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setEnabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); + pEnabledValidationFeatures = enabledValidationFeatures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = disabledValidationFeatureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPDisabledValidationFeatures( + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationFeatures = pDisabledValidationFeatures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setDisabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); + pDisabledValidationFeatures = disabledValidationFeatures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + enabledValidationFeatureCount, + pEnabledValidationFeatures, + disabledValidationFeatureCount, + pDisabledValidationFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFeaturesEXT const & ) const = default; +#else + bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && + ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && + ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && + ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); +# endif + } + + bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; + const void * pNext = {}; + uint32_t enabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * 
pEnabledValidationFeatures = {}; + uint32_t disabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationFeaturesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ValidationFeaturesEXT; + }; + + struct ValidationFlagsEXT + { + using NativeType = VkValidationFlagsEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( + uint32_t disabledValidationCheckCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT + : disabledValidationCheckCount( disabledValidationCheckCount_ ) + , pDisabledValidationChecks( pDisabledValidationChecks_ ) + {} + + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationFlagsEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationChecks_ ) + : disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ) + , pDisabledValidationChecks( disabledValidationChecks_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & + setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = disabledValidationCheckCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks( + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationChecks = pDisabledValidationChecks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT & setDisabledValidationChecks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); + pDisabledValidationChecks = disabledValidationChecks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFlagsEXT const & ) const = default; +#else + bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && + ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); +# endif + } + + bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; + const void * pNext = {}; + uint32_t disabledValidationCheckCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ValidationFlagsEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ValidationFlagsEXT; + }; + + struct VertexInputAttributeDescription2EXT + { + using NativeType = VkVertexInputAttributeDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVertexInputAttributeDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( + uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + {} + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputAttributeDescription2EXT & + operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT & + operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = 
binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & + setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, location, binding, format, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && + ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); +# endif + } + + bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; + void * pNext = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == + sizeof( VkVertexInputAttributeDescription2EXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VertexInputAttributeDescription2EXT; + }; + + struct VertexInputBindingDescription2EXT + { + using NativeType = VkVertexInputBindingDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVertexInputBindingDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( + uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, + uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + , divisor( divisor_ ) + {} + + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) + {} +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputBindingDescription2EXT & + operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & + setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, binding, stride, inputRate, divisor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && + ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && ( divisor == rhs.divisor ); +# endif + } + + bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; + void * pNext = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + uint32_t divisor = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == + sizeof( VkVertexInputBindingDescription2EXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VertexInputBindingDescription2EXT; + }; + +#if defined( VK_USE_PLATFORM_VI_NN ) + struct ViSurfaceCreateInfoNN + { + using NativeType = VkViSurfaceCreateInfoNN; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, + void * window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & + setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif + } + + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = ViSurfaceCreateInfoNN; + }; +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoPictureResourceKHR + { + using NativeType = VkVideoPictureResourceKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoPictureResourceKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {} ) VULKAN_HPP_NOEXCEPT + : codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , baseArrayLayer( baseArrayLayer_ ) + , imageViewBinding( imageViewBinding_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceKHR( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoPictureResourceKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoPictureResourceKHR & operator=( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceKHR & operator=( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & + setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & + setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & + setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + { + imageViewBinding = imageViewBinding_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoPictureResourceKHR const & ) const = default; +# else + bool operator==( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && + ( codedExtent == rhs.codedExtent ) && ( baseArrayLayer == rhs.baseArrayLayer ) && + ( imageViewBinding == rhs.imageViewBinding ); +# endif + } + + bool operator!=( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + uint32_t baseArrayLayer = {}; + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR ) == + sizeof( VkVideoPictureResourceKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoPictureResourceKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoPictureResourceKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoReferenceSlotKHR + { + using NativeType = VkVideoReferenceSlotKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( + int8_t slotIndex_ = {}, + const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ = {} ) VULKAN_HPP_NOEXCEPT + : slotIndex( slotIndex_ ) + , pPictureResource( pPictureResource_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotKHR( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoReferenceSlotKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoReferenceSlotKHR & operator=( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotKHR & operator=( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & + setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + pPictureResource = pPictureResource_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + explicit operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, slotIndex, pPictureResource ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoReferenceSlotKHR const & ) const = default; +# else + bool operator==( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && + ( pPictureResource == rhs.pPictureResource ); +# endif + } + + bool operator!=( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotKHR; + const void * pNext = {}; + int8_t slotIndex = {}; + const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR ) == sizeof( VkVideoReferenceSlotKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoReferenceSlotKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoReferenceSlotKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoBeginCodingInfoKHR + { + using NativeType = VkVideoBeginCodingInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codecQualityPreset( codecQualityPreset_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR( + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) + : flags( flags_ ) + , codecQualityPreset( codecQualityPreset_ ) + , 
videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setCodecQualityPreset( + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ ) VULKAN_HPP_NOEXCEPT + { + codecQualityPreset = codecQualityPreset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + codecQualityPreset, + videoSession, + videoSessionParameters, + referenceSlotCount, + pReferenceSlots ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; +# else + bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codecQualityPreset == rhs.codecQualityPreset ) && ( videoSession == 
rhs.videoSession ) && + ( videoSessionParameters == rhs.videoSessionParameters ) && + ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ); +# endif + } + + bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == + sizeof( VkVideoBeginCodingInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoBeginCodingInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoBindMemoryKHR + { + using NativeType = VkVideoBindMemoryKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryBindIndex( memoryBindIndex_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , memorySize( memorySize_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBindMemoryKHR( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBindMemoryKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoBindMemoryKHR & operator=( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBindMemoryKHR & operator=( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryBindIndex = memoryBindIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & + setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & + setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & + setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT + { + memorySize = memorySize_; + return 
+      *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoBindMemoryKHR *>( this );
+    }
+
+    explicit operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoBindMemoryKHR *>( this );
+    }
+
+# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::DeviceMemory const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoBindMemoryKHR const & ) const = default;
+# else
+    bool operator==( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) &&
+             ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize );
+#  endif
+    }
+
+    bool operator!=( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eVideoBindMemoryKHR;
+    const void *                        pNext           = {};
+    uint32_t                            memoryBindIndex = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory  memory          = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    memoryOffset    = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize    memorySize      = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR ) == sizeof( VkVideoBindMemoryKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>::value,
+                            "VideoBindMemoryKHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = VideoBindMemoryKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoCapabilitiesKHR + { + using NativeType = VkVideoCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {}, + uint32_t maxReferencePicturesSlotsCount_ = {}, + uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT + : capabilityFlags( capabilityFlags_ ) + , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ) + , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ) + , videoPictureExtentGranularity( videoPictureExtentGranularity_ ) + , minExtent( minExtent_ ) + , maxExtent( maxExtent_ ) + , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) + , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + capabilityFlags, + minBitstreamBufferOffsetAlignment, + minBitstreamBufferSizeAlignment, + videoPictureExtentGranularity, + minExtent, + maxExtent, + maxReferencePicturesSlotsCount, + maxReferencePicturesActiveCount ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCapabilitiesKHR const & ) const = default; +# else + bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilityFlags == rhs.capabilityFlags ) && + ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && + ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && + ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity ) && ( minExtent == rhs.minExtent ) && + ( maxExtent == rhs.maxExtent ) && + ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && + ( maxReferencePicturesActiveCount == 
rhs.maxReferencePicturesActiveCount ); +# endif + } + + bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {}; + VULKAN_HPP_NAMESPACE::Extent2D minExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {}; + uint32_t maxReferencePicturesSlotsCount = {}; + uint32_t maxReferencePicturesActiveCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoCapabilitiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoCapabilitiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoCodingControlInfoKHR + { + using NativeType = VkVideoCodingControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCodingControlInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCodingControlInfoKHR const & ) const = default; +# else + bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
!defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+#  endif
+    }
+
+    bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::eVideoCodingControlInfoKHR;
+    const void *                                     pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) ==
+                              sizeof( VkVideoCodingControlInfoKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
+                            "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
+  {
+    using Type = VideoCodingControlInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoDecodeH264CapabilitiesEXT
+  {
+    using NativeType = VkVideoDecodeH264CapabilitiesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT(
+      uint32_t                                  maxLevel_               = {},
+      VULKAN_HPP_NAMESPACE::Offset2D            fieldOffsetGranularity_ = {},
+      VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_    = {} ) VULKAN_HPP_NOEXCEPT
+      : maxLevel( maxLevel_ )
+      , fieldOffsetGranularity( fieldOffsetGranularity_ )
+      , stdExtensionVersion( stdExtensionVersion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14
+      VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast<VideoDecodeH264CapabilitiesEXT const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264CapabilitiesEXT &
+      operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesEXT *>( this );
+    }
+
+    explicit operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264CapabilitiesEXT *>( this );
+    }
+
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Offset2D const &,
+               VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxLevel, fieldOffsetGranularity, stdExtensionVersion );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264CapabilitiesEXT const & ) const = default;
+#  else
+    bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
+             ( fieldOffsetGranularity == rhs.fieldOffsetGranularity ) &&
+             ( stdExtensionVersion == rhs.stdExtensionVersion );
+#  endif
+    }
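/*
  Illustrative sketch (not part of the generated header): the equality operators in these
  wrappers have two paths.  Where reflect() is available (any compiler except GCC releases
  older than 7.5, which the guard above excludes), operator== packs every member into a
  std::tie tuple and compares the tuples; otherwise it falls back to an explicit member-wise
  comparison.  A minimal stand-alone version of the same idiom, using a hypothetical
  two-member struct named Sample:

      #include <cstdint>
      #include <tuple>

      struct Sample
      {
        uint32_t a = 0;
        uint64_t b = 0;

        // Tuple of references over all members; tuples compare lexicographically.
        auto reflect() const noexcept { return std::tie( a, b ); }

        bool operator==( Sample const & rhs ) const noexcept { return reflect() == rhs.reflect(); }
      };

  The tuple comparison and the member-wise comparison are equivalent; the split exists only
  so that the excluded GCC versions keep building.
*/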
+ + bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT; + void * pNext = {}; + uint32_t maxLevel = {}; + VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT ) == + sizeof( VkVideoDecodeH264CapabilitiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH264CapabilitiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264DpbSlotInfoEXT + { + using NativeType = VkVideoDecodeH264DpbSlotInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdReferenceInfo( pStdReferenceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264DpbSlotInfoEXT & + operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & + setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && 
( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT ) == + sizeof( VkVideoDecodeH264DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264MvcEXT + { + using NativeType = VkVideoDecodeH264MvcEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( const StdVideoDecodeH264Mvc * pStdMvc_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdMvc( pStdMvc_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264MvcEXT( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264MvcEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264MvcEXT & operator=( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264MvcEXT & operator=( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT & + setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT + { + pStdMvc = pStdMvc_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdMvc ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264MvcEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdMvc == rhs.pStdMvc ); +# endif + } + + bool operator!=( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264MvcEXT; + const void * pNext 
= {}; + const StdVideoDecodeH264Mvc * pStdMvc = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT ) == sizeof( VkVideoDecodeH264MvcEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264MvcEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264MvcEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264PictureInfoEXT + { + using NativeType = VkVideoDecodeH264PictureInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, + uint32_t slicesCount_ = {}, + const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( slicesCount_ ) + , pSlicesDataOffsets( pSlicesDataOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264PictureInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoEXT( + const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( static_cast( slicesDataOffsets_.size() ) ) + , pSlicesDataOffsets( slicesDataOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264PictureInfoEXT & + operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & + setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = slicesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & + setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSlicesDataOffsets = pSlicesDataOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = static_cast( slicesDataOffsets_.size() ); + pSlicesDataOffsets = slicesDataOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif 
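/*
  Usage sketch (not part of the generated header): when enhanced mode is enabled
  (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined), the ArrayProxyNoTemporaries setter above
  fills the count/pointer pair from any contiguous range in one call.  Assuming the default
  vk namespace, VK_ENABLE_BETA_EXTENSIONS, and hypothetical locals stdInfo
  (a StdVideoDecodeH264PictureInfo) and sliceOffsets (a std::vector<uint32_t>), the two
  forms below fill the same fields:

      std::vector<uint32_t> sliceOffsets = { 0u };   // byte offset of each slice in the bitstream buffer

      vk::VideoDecodeH264PictureInfoEXT info{};
      info.setPStdPictureInfo( &stdInfo )
          .setSlicesDataOffsets( sliceOffsets );     // slicesCount / pSlicesDataOffsets taken from the vector

      info.setSlicesCount( static_cast<uint32_t>( sliceOffsets.size() ) )
          .setPSlicesDataOffsets( sliceOffsets.data() );   // manual equivalent

  The proxy does not copy the offsets or extend their lifetime, so sliceOffsets must stay
  alive for as long as the structure is in use.
*/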
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264PictureInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets ); +# endif + } + + bool operator!=( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; + uint32_t slicesCount = {}; + const uint32_t * pSlicesDataOffsets = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT ) == + sizeof( VkVideoDecodeH264PictureInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH264PictureInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH264PictureInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264ProfileEXT + { + using NativeType = VkVideoDecodeH264ProfileEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT( + StdVideoH264ProfileIdc stdProfileIdc_ = {}, + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + , pictureLayout( pictureLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264ProfileEXT( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileEXT( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264ProfileEXT & operator=( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileEXT & operator=( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & + setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & + setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ ) VULKAN_HPP_NOEXCEPT + { + pictureLayout = pictureLayout_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc, pictureLayout ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && + ( pictureLayout == rhs.pictureLayout ); + } + + bool operator!=( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileEXT; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT ) == + sizeof( VkVideoDecodeH264ProfileEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264ProfileEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionCreateInfoEXT + { + using NativeType = VkVideoDecodeH264SessionCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( VideoDecodeH264SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionCreateInfoEXT( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264SessionCreateInfoEXT & + operator=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionCreateInfoEXT & + operator=( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator 
VkVideoDecodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pStdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pStdExtensionVersion == rhs.pStdExtensionVersion ); +# endif + } + + bool operator!=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT ) == + sizeof( VkVideoDecodeH264SessionCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionParametersAddInfoEXT + { + using NativeType = VkVideoDecodeH264SessionParametersAddInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( + VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264SessionParametersAddInfoEXT & + operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoEXT & + operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
VideoDecodeH264SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); +# endif + } + + bool operator!=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH264SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH264PictureParameterSet * pPpsStd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT ) == + sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH264SessionParametersCreateInfoEXT + { + using NativeType = VkVideoDecodeH264SessionParametersCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( + VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264SessionParametersCreateInfoEXT & + operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoEXT & + operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & + setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & + setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && + ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT ) == + sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265CapabilitiesEXT + { + using NativeType = VkVideoDecodeH265CapabilitiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT( + uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : maxLevel( maxLevel_ ) + , stdExtensionVersion( stdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265CapabilitiesEXT & + operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + explicit operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxLevel, stdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265CapabilitiesEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) 
&& ( maxLevel == rhs.maxLevel ) && + ( stdExtensionVersion == rhs.stdExtensionVersion ); +# endif + } + + bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT; + void * pNext = {}; + uint32_t maxLevel = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT ) == + sizeof( VkVideoDecodeH265CapabilitiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH265CapabilitiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265DpbSlotInfoEXT + { + using NativeType = VkVideoDecodeH265DpbSlotInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdReferenceInfo( pStdReferenceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265DpbSlotInfoEXT & + operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & + setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + const void * pNext = {}; + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT ) == + sizeof( VkVideoDecodeH265DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265PictureInfoEXT + { + using NativeType = VkVideoDecodeH265PictureInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, + uint32_t slicesCount_ = {}, + const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( slicesCount_ ) + , pSlicesDataOffsets( pSlicesDataOffsets_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265PictureInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoEXT( + StdVideoDecodeH265PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) + : pStdPictureInfo( pStdPictureInfo_ ) + , slicesCount( static_cast( slicesDataOffsets_.size() ) ) + , pSlicesDataOffsets( slicesDataOffsets_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265PictureInfoEXT & + operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & + setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = slicesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & + setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSlicesDataOffsets = pSlicesDataOffsets_; + return *this; 
+ } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + { + slicesCount = static_cast( slicesDataOffsets_.size() ); + pSlicesDataOffsets = slicesDataOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265PictureInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets ); +# endif + } + + bool operator!=( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT; + const void * pNext = {}; + StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; + uint32_t slicesCount = {}; + const uint32_t * pSlicesDataOffsets = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT ) == + sizeof( VkVideoDecodeH265PictureInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH265PictureInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265PictureInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265ProfileEXT + { + using NativeType = VkVideoDecodeH265ProfileEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265ProfileEXT & operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & + setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT ) == + sizeof( VkVideoDecodeH265ProfileEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265ProfileEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265SessionCreateInfoEXT + { + using NativeType = VkVideoDecodeH265SessionCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH265SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( VideoDecodeH265SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionCreateInfoEXT( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265SessionCreateInfoEXT & + operator=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionCreateInfoEXT & + operator=( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pStdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pStdExtensionVersion == rhs.pStdExtensionVersion ); +# endif + } + + bool operator!=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT ) == + sizeof( VkVideoDecodeH265SessionCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265SessionParametersAddInfoEXT + { + using NativeType = VkVideoDecodeH265SessionParametersAddInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH265PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( + VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265SessionParametersAddInfoEXT & + operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoEXT & + operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & + setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
VideoDecodeH265SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & + setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); +# endif + } + + bool operator!=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH265SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH265PictureParameterSet * pPpsStd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT ) == + sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeH265SessionParametersCreateInfoEXT + { + using NativeType = VkVideoDecodeH265SessionParametersCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT( + VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265SessionParametersCreateInfoEXT & + operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersCreateInfoEXT & + operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & + setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & + setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && + ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT ) == + sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoDecodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoDecodeInfoKHR + { + using NativeType = VkVideoDecodeInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Offset2D codedOffset_, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + 
referenceSlots_ ) + : flags( flags_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferOffset = srcBufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferRange = srcBufferRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource( + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + dstPictureResource = dstPictureResource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot( + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator 
VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + codedOffset, + codedExtent, + srcBuffer, + srcBufferOffset, + srcBufferRange, + dstPictureResource, + pSetupReferenceSlot, + referenceSlotCount, + pReferenceSlots ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeInfoKHR const & ) const = default; +# else + bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && + ( srcBuffer == rhs.srcBuffer ) && ( srcBufferOffset == rhs.srcBufferOffset ) && + ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); +# endif + } + + bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoDecodeInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264CapabilitiesEXT + { + using NativeType = VkVideoEncodeH264CapabilitiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT( + VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment_ = {}, + uint8_t maxNumL0ReferenceForP_ = {}, + uint8_t maxNumL0ReferenceForB_ = {}, + uint8_t maxNumL1Reference_ = {}, + uint8_t qualityLevelCount_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , inputModeFlags( inputModeFlags_ ) + , outputModeFlags( outputModeFlags_ ) + , minPictureSizeInMbs( minPictureSizeInMbs_ ) + , maxPictureSizeInMbs( maxPictureSizeInMbs_ ) + , inputImageDataAlignment( inputImageDataAlignment_ ) + , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ ) + , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ ) + , maxNumL1Reference( maxNumL1Reference_ ) + , qualityLevelCount( qualityLevelCount_ ) + , stdExtensionVersion( stdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264CapabilitiesEXT & + operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT + { + inputModeFlags = inputModeFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT + { + outputModeFlags = outputModeFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setMinPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & minPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT + { + minPictureSizeInMbs = minPictureSizeInMbs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setMaxPictureSizeInMbs( 
VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT + { + maxPictureSizeInMbs = maxPictureSizeInMbs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) VULKAN_HPP_NOEXCEPT + { + inputImageDataAlignment = inputImageDataAlignment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL0ReferenceForP = maxNumL0ReferenceForP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL0ReferenceForB = maxNumL0ReferenceForB_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL1Reference = maxNumL1Reference_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & + setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevelCount = qualityLevelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setStdExtensionVersion( + VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + stdExtensionVersion = stdExtensionVersion_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + inputModeFlags, + outputModeFlags, + minPictureSizeInMbs, + maxPictureSizeInMbs, + inputImageDataAlignment, + maxNumL0ReferenceForP, + maxNumL0ReferenceForB, + maxNumL1Reference, + qualityLevelCount, + stdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) && + ( minPictureSizeInMbs == rhs.minPictureSizeInMbs ) && ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && + ( inputImageDataAlignment == rhs.inputImageDataAlignment ) && + ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) && + ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) && + ( qualityLevelCount == rhs.qualityLevelCount ) && ( stdExtensionVersion == rhs.stdExtensionVersion ); +# endif + } + + bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT 
flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {}; + VULKAN_HPP_NAMESPACE::Extent2D minPictureSizeInMbs = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {}; + VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment = {}; + uint8_t maxNumL0ReferenceForP = {}; + uint8_t maxNumL0ReferenceForB = {}; + uint8_t maxNumL1Reference = {}; + uint8_t qualityLevelCount = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == + sizeof( VkVideoEncodeH264CapabilitiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264DpbSlotInfoEXT + { + using NativeType = VkVideoEncodeH264DpbSlotInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264DpbSlotInfoEXT( int8_t slotIndex_ = {}, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : slotIndex( slotIndex_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264DpbSlotInfoEXT & + operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & + setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, slotIndex, pStdPictureInfo ); + } +# endif + +# if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && + ( pStdPictureInfo == rhs.pStdPictureInfo ); +# endif + } + + bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT; + const void * pNext = {}; + int8_t slotIndex = {}; + const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) == + sizeof( VkVideoEncodeH264DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264EmitPictureParametersEXT + { + using NativeType = VkVideoEncodeH264EmitPictureParametersEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264EmitPictureParametersEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264EmitPictureParametersEXT( uint8_t spsId_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, + uint32_t ppsIdEntryCount_ = {}, + const uint8_t * ppsIdEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : spsId( spsId_ ) + , emitSpsEnable( emitSpsEnable_ ) + , ppsIdEntryCount( ppsIdEntryCount_ ) + , ppsIdEntries( ppsIdEntries_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT( VideoEncodeH264EmitPictureParametersEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264EmitPictureParametersEXT( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264EmitPictureParametersEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264EmitPictureParametersEXT( + uint8_t spsId_, + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) + : spsId( spsId_ ) + , emitSpsEnable( emitSpsEnable_ ) + , ppsIdEntryCount( static_cast( psIdEntries_.size() ) ) + , ppsIdEntries( psIdEntries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264EmitPictureParametersEXT & + operator=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264EmitPictureParametersEXT & + operator=( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT + { + spsId = spsId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & + setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT + { + emitSpsEnable = emitSpsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & + setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntryCount = ppsIdEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & + setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntries = ppsIdEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntryCount = static_cast( psIdEntries_.size() ); + ppsIdEntries = psIdEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, spsId, emitSpsEnable, ppsIdEntryCount, ppsIdEntries ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264EmitPictureParametersEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) && + ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && + ( ppsIdEntries == rhs.ppsIdEntries ); +# endif + } + + bool operator!=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersEXT; + const void * pNext = {}; + uint8_t spsId = {}; + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; + uint32_t ppsIdEntryCount = {}; + const uint8_t * ppsIdEntries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT ) == + sizeof( VkVideoEncodeH264EmitPictureParametersEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264EmitPictureParametersEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH264EmitPictureParametersEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264FrameSizeEXT + { + using NativeType = VkVideoEncodeH264FrameSizeEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( uint32_t frameISize_ = {}, + uint32_t framePSize_ = {}, + uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT + : frameISize( frameISize_ ) + , framePSize( framePSize_ ) + , frameBSize( frameBSize_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264FrameSizeEXT( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264FrameSizeEXT( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264FrameSizeEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264FrameSizeEXT & operator=( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264FrameSizeEXT & operator=( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT + { + frameISize = frameISize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + { + framePSize = framePSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + { + frameBSize = frameBSize_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( frameISize, framePSize, frameBSize ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264FrameSizeEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif + } + + bool operator!=( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) == + sizeof( VkVideoEncodeH264FrameSizeEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264NaluSliceEXT + { + using NativeType = VkVideoEncodeH264NaluSliceEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT( + const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {}, + uint32_t mbCount_ = {}, + uint8_t refFinalList0EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ = {}, + uint8_t refFinalList1EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ = {} ) VULKAN_HPP_NOEXCEPT + : pSliceHeaderStd( pSliceHeaderStd_ ) + , mbCount( mbCount_ ) + , refFinalList0EntryCount( refFinalList0EntryCount_ ) + , pRefFinalList0Entries( pRefFinalList0Entries_ ) + , refFinalList1EntryCount( refFinalList1EntryCount_ ) + , pRefFinalList1Entries( pRefFinalList1Entries_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264NaluSliceEXT( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264NaluSliceEXT( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264NaluSliceEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264NaluSliceEXT( + const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_, + uint32_t mbCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList0Entries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList1Entries_ = {} ) + : pSliceHeaderStd( pSliceHeaderStd_ ) + , mbCount( mbCount_ ) + , refFinalList0EntryCount( static_cast( refFinalList0Entries_.size() ) ) + , pRefFinalList0Entries( refFinalList0Entries_.data() ) + , refFinalList1EntryCount( static_cast( refFinalList1Entries_.size() ) ) + , pRefFinalList1Entries( refFinalList1Entries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264NaluSliceEXT & operator=( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264NaluSliceEXT & operator=( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & + setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT + { + pSliceHeaderStd = pSliceHeaderStd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT + { + mbCount = mbCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & + setRefFinalList0EntryCount( uint8_t refFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList0EntryCount = refFinalList0EntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPRefFinalList0Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefFinalList0Entries = pRefFinalList0Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
+ VideoEncodeH264NaluSliceEXT & setRefFinalList0Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList0EntryCount = static_cast( refFinalList0Entries_.size() ); + pRefFinalList0Entries = refFinalList0Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & + setRefFinalList1EntryCount( uint8_t refFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList1EntryCount = refFinalList1EntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPRefFinalList1Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefFinalList1Entries = pRefFinalList1Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264NaluSliceEXT & setRefFinalList1Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + refFinalList1EntryCount = static_cast( refFinalList1Entries_.size() ); + pRefFinalList1Entries = refFinalList1Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pSliceHeaderStd, + mbCount, + refFinalList0EntryCount, + pRefFinalList0Entries, + refFinalList1EntryCount, + pRefFinalList1Entries ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264NaluSliceEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSliceHeaderStd == rhs.pSliceHeaderStd ) && + ( mbCount == rhs.mbCount ) && ( refFinalList0EntryCount == rhs.refFinalList0EntryCount ) && + ( pRefFinalList0Entries == rhs.pRefFinalList0Entries ) && + ( refFinalList1EntryCount == rhs.refFinalList1EntryCount ) && + ( pRefFinalList1Entries == rhs.pRefFinalList1Entries ); +# endif + } + + bool operator!=( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceEXT; + const void * pNext = {}; + const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {}; + uint32_t mbCount = {}; + uint8_t refFinalList0EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries = {}; + uint8_t refFinalList1EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT ) == + sizeof( VkVideoEncodeH264NaluSliceEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264NaluSliceEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264NaluSliceEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264ProfileEXT + { + using NativeType = VkVideoEncodeH264ProfileEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264ProfileEXT( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264ProfileEXT( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264ProfileEXT & operator=( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264ProfileEXT & operator=( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT & + setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileEXT; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT ) == + sizeof( VkVideoEncodeH264ProfileEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264ProfileEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264QpEXT + { + using NativeType = VkVideoEncodeH264QpEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT + : qpI( qpI_ ) + , qpP( qpP_ ) + , qpB( qpB_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264QpEXT( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QpEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264QpEXT & operator=( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264QpEXT & operator=( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT + { + qpI = qpI_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT + { + qpP = qpP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT + { + qpB = qpB_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( qpI, qpP, qpB ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QpEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB ); +# endif + } + + bool operator!=( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + int32_t qpI = {}; + int32_t qpP = {}; + int32_t qpB = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QpEXT is not nothrow_move_constructible!" 
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264RateControlInfoEXT + { + using NativeType = VkVideoEncodeH264RateControlInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264RateControlInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ = + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown, + uint8_t temporalLayerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : gopFrameCount( gopFrameCount_ ) + , idrPeriod( idrPeriod_ ) + , consecutiveBFrameCount( consecutiveBFrameCount_ ) + , rateControlStructure( rateControlStructure_ ) + , temporalLayerCount( temporalLayerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264RateControlInfoEXT( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlInfoEXT( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264RateControlInfoEXT & + operator=( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlInfoEXT & operator=( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & + setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT + { + idrPeriod = idrPeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & + setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setRateControlStructure( + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT + { + rateControlStructure = rateControlStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & + setTemporalLayerCount( uint8_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, 
rateControlStructure, temporalLayerCount ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && + ( rateControlStructure == rhs.rateControlStructure ) && ( temporalLayerCount == rhs.temporalLayerCount ); +# endif + } + + bool operator!=( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT; + const void * pNext = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure = + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown; + uint8_t temporalLayerCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) == + sizeof( VkVideoEncodeH264RateControlInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264RateControlInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264RateControlLayerInfoEXT + { + using NativeType = VkVideoEncodeH264RateControlLayerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264RateControlLayerInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( + uint8_t temporalLayerId_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT + : temporalLayerId( temporalLayerId_ ) + , useInitialRcQp( useInitialRcQp_ ) + , initialRcQp( initialRcQp_ ) + , useMinQp( useMinQp_ ) + , minQp( minQp_ ) + , useMaxQp( useMaxQp_ ) + , maxQp( maxQp_ ) + , useMaxFrameSize( useMaxFrameSize_ ) + , maxFrameSize( maxFrameSize_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlLayerInfoEXT( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlLayerInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
VideoEncodeH264RateControlLayerInfoEXT & + operator=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlLayerInfoEXT & + operator=( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setTemporalLayerId( uint8_t temporalLayerId_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerId = temporalLayerId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT + { + useInitialRcQp = useInitialRcQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT + { + initialRcQp = initialRcQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + { + useMinQp = useMinQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQp = useMaxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + temporalLayerId, + useInitialRcQp, + initialRcQp, + useMinQp, + minQp, + useMaxQp, + maxQp, + useMaxFrameSize, + maxFrameSize ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlLayerInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalLayerId == 
rhs.temporalLayerId ) && + ( useInitialRcQp == rhs.useInitialRcQp ) && ( initialRcQp == rhs.initialRcQp ) && + ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && + ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT; + const void * pNext = {}; + uint8_t temporalLayerId = {}; + VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) == + sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264RateControlLayerInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264SessionCreateInfoEXT + { + using NativeType = VkVideoEncodeH264SessionCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , maxPictureSizeInMbs( maxPictureSizeInMbs_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( VideoEncodeH264SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionCreateInfoEXT( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionCreateInfoEXT & + operator=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionCreateInfoEXT & + operator=( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ ) 
VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & + setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT + { + maxPictureSizeInMbs = maxPictureSizeInMbs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, maxPictureSizeInMbs, pStdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && ( pStdExtensionVersion == rhs.pStdExtensionVersion ); +# endif + } + + bool operator!=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT ) == + sizeof( VkVideoEncodeH264SessionCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" 
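+  // Editorial sketch (not part of the upstream patch): the setters defined above chain,
+  // so session create info is normally filled in one statement. The extension-version
+  // object and the picture-size extent below are assumptions for illustration; a real
+  // application would obtain them from its capability query and frame dimensions:
+  //
+  //   VULKAN_HPP_NAMESPACE::ExtensionProperties stdVersion = /* queried from the encoder */ {};
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT sessionCreateInfo{};
+  //   sessionCreateInfo.setMaxPictureSizeInMbs( { 120, 68 } )  // e.g. a 1920x1088-aligned frame in 16x16 macroblocks
+  //                    .setPStdExtensionVersion( &stdVersion );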
); + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264SessionParametersAddInfoEXT + { + using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( + uint32_t spsStdCount_ = {}, + const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH264PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( + VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersAddInfoEXT & + operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersAddInfoEXT & + operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & + setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & + setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
VideoEncodeH264SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && + ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t spsStdCount = {}; + const StdVideoH264SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH264PictureParameterSet * pPpsStd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == + sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" 
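+  // Editorial sketch (not part of the upstream patch): when VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // is not defined, the ArrayProxyNoTemporaries constructor above lets the caller hand over
+  // SPS/PPS arrays directly and have the counts and pointers derived from them. The
+  // parameter-set contents themselves are assumed to be prepared elsewhere by the application:
+  //
+  //   std::array<StdVideoH264SequenceParameterSet, 1> sps = { /* filled by the application */ };
+  //   std::array<StdVideoH264PictureParameterSet, 1>  pps = { /* filled by the application */ };
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT addInfo( sps, pps );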
); + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264SessionParametersCreateInfoEXT + { + using NativeType = VkVideoEncodeH264SessionParametersCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT( + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT( + VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersCreateInfoEXT & + operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersCreateInfoEXT & + operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & + setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & + setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < 
GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && + ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) == + sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH264VclFrameInfoEXT + { + using NativeType = VkVideoEncodeH264VclFrameInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( + uint8_t refDefaultFinalList0EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ = {}, + uint8_t refDefaultFinalList1EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ = {}, + uint32_t naluSliceEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : refDefaultFinalList0EntryCount( refDefaultFinalList0EntryCount_ ) + , pRefDefaultFinalList0Entries( pRefDefaultFinalList0Entries_ ) + , refDefaultFinalList1EntryCount( refDefaultFinalList1EntryCount_ ) + , pRefDefaultFinalList1Entries( pRefDefaultFinalList1Entries_ ) + , naluSliceEntryCount( naluSliceEntryCount_ ) + , pNaluSliceEntries( pNaluSliceEntries_ ) + , pCurrentPictureInfo( pCurrentPictureInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList0Entries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList1Entries_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + naluSliceEntries_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) + : refDefaultFinalList0EntryCount( static_cast( 
refDefaultFinalList0Entries_.size() ) ) + , pRefDefaultFinalList0Entries( refDefaultFinalList0Entries_.data() ) + , refDefaultFinalList1EntryCount( static_cast( refDefaultFinalList1Entries_.size() ) ) + , pRefDefaultFinalList1Entries( refDefaultFinalList1Entries_.data() ) + , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) + , pNaluSliceEntries( naluSliceEntries_.data() ) + , pCurrentPictureInfo( pCurrentPictureInfo_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264VclFrameInfoEXT & + operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & + setRefDefaultFinalList0EntryCount( uint8_t refDefaultFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList0EntryCount = refDefaultFinalList0EntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList0Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefDefaultFinalList0Entries = pRefDefaultFinalList0Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList0Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList0EntryCount = static_cast( refDefaultFinalList0Entries_.size() ); + pRefDefaultFinalList0Entries = refDefaultFinalList0Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & + setRefDefaultFinalList1EntryCount( uint8_t refDefaultFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList1EntryCount = refDefaultFinalList1EntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList1Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + pRefDefaultFinalList1Entries = pRefDefaultFinalList1Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList1Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + refDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + refDefaultFinalList1EntryCount = static_cast( refDefaultFinalList1Entries_.size() ); + pRefDefaultFinalList1Entries = refDefaultFinalList1Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & + setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = naluSliceEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceEntries = pNaluSliceEntries_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); + pNaluSliceEntries = naluSliceEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo( + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCurrentPictureInfo = pCurrentPictureInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + refDefaultFinalList0EntryCount, + pRefDefaultFinalList0Entries, + refDefaultFinalList1EntryCount, + pRefDefaultFinalList1Entries, + naluSliceEntryCount, + pNaluSliceEntries, + pCurrentPictureInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( refDefaultFinalList0EntryCount == rhs.refDefaultFinalList0EntryCount ) && + ( pRefDefaultFinalList0Entries == rhs.pRefDefaultFinalList0Entries ) && + ( refDefaultFinalList1EntryCount == rhs.refDefaultFinalList1EntryCount ) && + ( pRefDefaultFinalList1Entries == rhs.pRefDefaultFinalList1Entries ) && + ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && + ( pCurrentPictureInfo == rhs.pCurrentPictureInfo ); +# endif + } + + bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT; + const void * pNext = {}; + uint8_t refDefaultFinalList0EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries = {}; + uint8_t refDefaultFinalList1EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries = {}; + uint32_t naluSliceEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT ) == + sizeof( VkVideoEncodeH264VclFrameInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH264VclFrameInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265CapabilitiesEXT + { + using NativeType = VkVideoEncodeH265CapabilitiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT( + VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment_ = {}, + uint8_t maxNumL0ReferenceForP_ = {}, + uint8_t maxNumL0ReferenceForB_ = {}, + uint8_t maxNumL1Reference_ = {}, + uint8_t maxNumSubLayers_ = {}, + uint8_t qualityLevelCount_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , inputModeFlags( inputModeFlags_ ) + , outputModeFlags( outputModeFlags_ ) + , ctbSizes( ctbSizes_ ) + , inputImageDataAlignment( inputImageDataAlignment_ ) + , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ ) + , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ ) + , maxNumL1Reference( maxNumL1Reference_ ) + , maxNumSubLayers( maxNumSubLayers_ ) + , qualityLevelCount( qualityLevelCount_ ) + , stdExtensionVersion( stdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 + VideoEncodeH265CapabilitiesEXT( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265CapabilitiesEXT( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265CapabilitiesEXT & + operator=( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265CapabilitiesEXT & operator=( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT + { + inputModeFlags = inputModeFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT + { + outputModeFlags = outputModeFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setCtbSizes( VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ ) VULKAN_HPP_NOEXCEPT + { + ctbSizes = ctbSizes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) 
VULKAN_HPP_NOEXCEPT + { + inputImageDataAlignment = inputImageDataAlignment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL0ReferenceForP = maxNumL0ReferenceForP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL0ReferenceForB = maxNumL0ReferenceForB_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT + { + maxNumL1Reference = maxNumL1Reference_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setMaxNumSubLayers( uint8_t maxNumSubLayers_ ) VULKAN_HPP_NOEXCEPT + { + maxNumSubLayers = maxNumSubLayers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & + setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevelCount = qualityLevelCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setStdExtensionVersion( + VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + stdExtensionVersion = stdExtensionVersion_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + inputModeFlags, + outputModeFlags, + ctbSizes, + inputImageDataAlignment, + maxNumL0ReferenceForP, + maxNumL0ReferenceForB, + maxNumL1Reference, + maxNumSubLayers, + qualityLevelCount, + stdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265CapabilitiesEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) && + ( ctbSizes == rhs.ctbSizes ) && ( inputImageDataAlignment == rhs.inputImageDataAlignment ) && + ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) && + ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) && + ( maxNumSubLayers == rhs.maxNumSubLayers ) && ( qualityLevelCount == rhs.qualityLevelCount ) && + ( stdExtensionVersion == rhs.stdExtensionVersion ); +# endif + } + + bool operator!=( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = 
{}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {}; + VULKAN_HPP_NAMESPACE::Extent2D inputImageDataAlignment = {}; + uint8_t maxNumL0ReferenceForP = {}; + uint8_t maxNumL0ReferenceForB = {}; + uint8_t maxNumL1Reference = {}; + uint8_t maxNumSubLayers = {}; + uint8_t qualityLevelCount = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) == + sizeof( VkVideoEncodeH265CapabilitiesEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265CapabilitiesEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265DpbSlotInfoEXT + { + using NativeType = VkVideoEncodeH265DpbSlotInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( + int8_t slotIndex_ = {}, const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : slotIndex( slotIndex_ ) + , pStdReferenceInfo( pStdReferenceInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265DpbSlotInfoEXT( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265DpbSlotInfoEXT( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265DpbSlotInfoEXT & + operator=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265DpbSlotInfoEXT & operator=( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & + setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, slotIndex, pStdReferenceInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265DpbSlotInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && + ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoEXT; + const void * pNext = {}; + int8_t slotIndex = {}; + const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) == + sizeof( VkVideoEncodeH265DpbSlotInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265DpbSlotInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265EmitPictureParametersEXT + { + using NativeType = VkVideoEncodeH265EmitPictureParametersEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH265EmitPictureParametersEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265EmitPictureParametersEXT( uint8_t vpsId_ = {}, + uint8_t spsId_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, + uint32_t ppsIdEntryCount_ = {}, + const uint8_t * ppsIdEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : vpsId( vpsId_ ) + , spsId( spsId_ ) + , emitVpsEnable( emitVpsEnable_ ) + , emitSpsEnable( emitSpsEnable_ ) + , ppsIdEntryCount( ppsIdEntryCount_ ) + , ppsIdEntries( ppsIdEntries_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersEXT( VideoEncodeH265EmitPictureParametersEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265EmitPictureParametersEXT( VkVideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265EmitPictureParametersEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265EmitPictureParametersEXT( + uint8_t vpsId_, + uint8_t spsId_, + VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_, + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) + : vpsId( vpsId_ ) + , spsId( spsId_ ) + , emitVpsEnable( emitVpsEnable_ ) + , emitSpsEnable( emitSpsEnable_ ) + , ppsIdEntryCount( static_cast( psIdEntries_.size() ) ) + , ppsIdEntries( psIdEntries_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265EmitPictureParametersEXT & + operator=( VideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265EmitPictureParametersEXT & + operator=( VkVideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & + setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setVpsId( uint8_t vpsId_ ) VULKAN_HPP_NOEXCEPT + { + vpsId = vpsId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT + { + spsId = spsId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & + setEmitVpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ ) VULKAN_HPP_NOEXCEPT + { + emitVpsEnable = emitVpsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & + setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT + { + emitSpsEnable = emitSpsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & + setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntryCount = ppsIdEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & + setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntries = ppsIdEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265EmitPictureParametersEXT & setPsIdEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT + { + ppsIdEntryCount = static_cast( psIdEntries_.size() ); + ppsIdEntries = psIdEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vpsId, spsId, emitVpsEnable, emitSpsEnable, ppsIdEntryCount, ppsIdEntries ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265EmitPictureParametersEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsId == rhs.vpsId ) && ( spsId == rhs.spsId ) && + ( emitVpsEnable == rhs.emitVpsEnable ) && ( emitSpsEnable == rhs.emitSpsEnable ) && + ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && ( ppsIdEntries == rhs.ppsIdEntries ); +# endif + } + + bool operator!=( VideoEncodeH265EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersEXT; + const void * pNext = {}; + uint8_t vpsId = {}; + uint8_t spsId = {}; + VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; + uint32_t ppsIdEntryCount = {}; + const uint8_t * ppsIdEntries = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT ) == + sizeof( VkVideoEncodeH265EmitPictureParametersEXT ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265EmitPictureParametersEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265EmitPictureParametersEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265FrameSizeEXT + { + using NativeType = VkVideoEncodeH265FrameSizeEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( uint32_t frameISize_ = {}, + uint32_t framePSize_ = {}, + uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT + : frameISize( frameISize_ ) + , framePSize( framePSize_ ) + , frameBSize( frameBSize_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265FrameSizeEXT( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265FrameSizeEXT( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265FrameSizeEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265FrameSizeEXT & operator=( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265FrameSizeEXT & operator=( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT + { + frameISize = frameISize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + { + framePSize = framePSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + { + frameBSize = frameBSize_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( frameISize, framePSize, frameBSize ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265FrameSizeEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif + } + + bool operator!=( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) == + sizeof( VkVideoEncodeH265FrameSizeEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265ReferenceListsEXT + { + using NativeType = VkVideoEncodeH265ReferenceListsEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsEXT( + uint8_t referenceList0EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ = {}, + uint8_t referenceList1EntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ = {}, + const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {} ) VULKAN_HPP_NOEXCEPT + : referenceList0EntryCount( referenceList0EntryCount_ ) + , pReferenceList0Entries( pReferenceList0Entries_ ) + , referenceList1EntryCount( referenceList1EntryCount_ ) + , pReferenceList1Entries( pReferenceList1Entries_ ) + , pReferenceModifications( pReferenceModifications_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265ReferenceListsEXT( VideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265ReferenceListsEXT( VkVideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265ReferenceListsEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265ReferenceListsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceList0Entries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceList1Entries_ = {}, + const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {} ) + : referenceList0EntryCount( static_cast( referenceList0Entries_.size() ) ) + , pReferenceList0Entries( referenceList0Entries_.data() ) + , referenceList1EntryCount( static_cast( referenceList1Entries_.size() ) ) + , pReferenceList1Entries( referenceList1Entries_.data() ) + , pReferenceModifications( pReferenceModifications_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265ReferenceListsEXT & + operator=( VideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265ReferenceListsEXT & operator=( VkVideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & + setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceList0EntryCount = referenceList0EntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList0Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceList0Entries = pReferenceList0Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265ReferenceListsEXT & setReferenceList0Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + 
referenceList0Entries_ ) VULKAN_HPP_NOEXCEPT + { + referenceList0EntryCount = static_cast( referenceList0Entries_.size() ); + pReferenceList0Entries = referenceList0Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & + setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceList1EntryCount = referenceList1EntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList1Entries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceList1Entries = pReferenceList1Entries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265ReferenceListsEXT & setReferenceList1Entries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceList1Entries_ ) VULKAN_HPP_NOEXCEPT + { + referenceList1EntryCount = static_cast( referenceList1Entries_.size() ); + pReferenceList1Entries = referenceList1Entries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceModifications( + const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceModifications = pReferenceModifications_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265ReferenceListsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265ReferenceListsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + referenceList0EntryCount, + pReferenceList0Entries, + referenceList1EntryCount, + pReferenceList1Entries, + pReferenceModifications ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265ReferenceListsEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( referenceList0EntryCount == rhs.referenceList0EntryCount ) && + ( pReferenceList0Entries == rhs.pReferenceList0Entries ) && + ( referenceList1EntryCount == rhs.referenceList1EntryCount ) && + ( pReferenceList1Entries == rhs.pReferenceList1Entries ) && + ( pReferenceModifications == rhs.pReferenceModifications ); +# endif + } + + bool operator!=( VideoEncodeH265ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsEXT; + const void * pNext = {}; + uint8_t referenceList0EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries = {}; + uint8_t referenceList1EntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries = {}; + const StdVideoEncodeH265ReferenceModifications * pReferenceModifications = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT ) == 
+ sizeof( VkVideoEncodeH265ReferenceListsEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265ReferenceListsEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265ReferenceListsEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265NaluSliceEXT + { + using NativeType = VkVideoEncodeH265NaluSliceEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceEXT( + uint32_t ctbCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ = {}, + const StdVideoEncodeH265SliceHeader * pSliceHeaderStd_ = {} ) VULKAN_HPP_NOEXCEPT + : ctbCount( ctbCount_ ) + , pReferenceFinalLists( pReferenceFinalLists_ ) + , pSliceHeaderStd( pSliceHeaderStd_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265NaluSliceEXT( VideoEncodeH265NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265NaluSliceEXT( VkVideoEncodeH265NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265NaluSliceEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265NaluSliceEXT & operator=( VideoEncodeH265NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265NaluSliceEXT & operator=( VkVideoEncodeH265NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceEXT & setCtbCount( uint32_t ctbCount_ ) VULKAN_HPP_NOEXCEPT + { + ctbCount = ctbCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceEXT & setPReferenceFinalLists( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceFinalLists = pReferenceFinalLists_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceEXT & + setPSliceHeaderStd( const StdVideoEncodeH265SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT + { + pSliceHeaderStd = pSliceHeaderStd_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265NaluSliceEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ctbCount, pReferenceFinalLists, pSliceHeaderStd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265NaluSliceEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ctbCount == rhs.ctbCount ) && + ( pReferenceFinalLists == rhs.pReferenceFinalLists ) && ( pSliceHeaderStd == rhs.pSliceHeaderStd ); +# endif + } + + bool operator!=( VideoEncodeH265NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceEXT; + const void * pNext = {}; + uint32_t ctbCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists = {}; + const StdVideoEncodeH265SliceHeader * pSliceHeaderStd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceEXT ) == + sizeof( VkVideoEncodeH265NaluSliceEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265NaluSliceEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265NaluSliceEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265ProfileEXT + { + using NativeType = VkVideoEncodeH265ProfileEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT + : stdProfileIdc( stdProfileIdc_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265ProfileEXT( VideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265ProfileEXT( VkVideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265ProfileEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265ProfileEXT & operator=( VideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265ProfileEXT & operator=( VkVideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT & + setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( 
auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileEXT; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT ) == + sizeof( VkVideoEncodeH265ProfileEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265ProfileEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265ProfileEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265QpEXT + { + using NativeType = VkVideoEncodeH265QpEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT + : qpI( qpI_ ) + , qpP( qpP_ ) + , qpB( qpB_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QpEXT( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QpEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265QpEXT & operator=( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QpEXT & operator=( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT + { + qpI = qpI_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT + { + qpP = qpP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT + { + qpB = qpB_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( qpI, qpP, qpB ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QpEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( qpI == rhs.qpI ) && ( qpP == 
rhs.qpP ) && ( qpB == rhs.qpB ); +# endif + } + + bool operator!=( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + int32_t qpI = {}; + int32_t qpP = {}; + int32_t qpB = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QpEXT is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265RateControlInfoEXT + { + using NativeType = VkVideoEncodeH265RateControlInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH265RateControlInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ = + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown, + uint8_t subLayerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : gopFrameCount( gopFrameCount_ ) + , idrPeriod( idrPeriod_ ) + , consecutiveBFrameCount( consecutiveBFrameCount_ ) + , rateControlStructure( rateControlStructure_ ) + , subLayerCount( subLayerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265RateControlInfoEXT( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlInfoEXT( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265RateControlInfoEXT & + operator=( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlInfoEXT & operator=( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & + setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT + { + idrPeriod = idrPeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & + setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setRateControlStructure( + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT + { + rateControlStructure = rateControlStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & + setSubLayerCount( uint8_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + 
subLayerCount = subLayerCount_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && + ( rateControlStructure == rhs.rateControlStructure ) && ( subLayerCount == rhs.subLayerCount ); +# endif + } + + bool operator!=( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT; + const void * pNext = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure = + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown; + uint8_t subLayerCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) == + sizeof( VkVideoEncodeH265RateControlInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265RateControlLayerInfoEXT + { + using NativeType = VkVideoEncodeH265RateControlLayerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH265RateControlLayerInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( + uint8_t temporalId_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT + : temporalId( temporalId_ ) + , useInitialRcQp( useInitialRcQp_ ) + , initialRcQp( initialRcQp_ ) + , useMinQp( useMinQp_ ) + , minQp( minQp_ ) + , useMaxQp( useMaxQp_ ) + , maxQp( maxQp_ ) + , useMaxFrameSize( useMaxFrameSize_ ) + , maxFrameSize( maxFrameSize_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlLayerInfoEXT( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlLayerInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265RateControlLayerInfoEXT & + operator=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlLayerInfoEXT & + operator=( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setTemporalId( uint8_t temporalId_ ) VULKAN_HPP_NOEXCEPT + { + temporalId = temporalId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT + { + useInitialRcQp = useInitialRcQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT + { + initialRcQp = initialRcQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + { + useMinQp = useMinQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQp = useMaxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH265RateControlLayerInfoEXT & + setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + temporalId, + useInitialRcQp, + initialRcQp, + useMinQp, + minQp, + useMaxQp, + maxQp, + useMaxFrameSize, + maxFrameSize ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlLayerInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalId == rhs.temporalId ) && + ( useInitialRcQp == rhs.useInitialRcQp ) && ( initialRcQp == rhs.initialRcQp ) && + ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && + ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT; + const void * pNext = {}; + uint8_t temporalId = {}; + VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) == + sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlLayerInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265SessionCreateInfoEXT + { + using NativeType = VkVideoEncodeH265SessionCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH265SessionCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT( + VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags_ = {}, + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pStdExtensionVersion( pStdExtensionVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT( VideoEncodeH265SessionCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionCreateInfoEXT( VkVideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionCreateInfoEXT( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionCreateInfoEXT & + operator=( VideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionCreateInfoEXT & + operator=( VkVideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setPStdExtensionVersion( + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT + { + pStdExtensionVersion = pStdExtensionVersion_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pStdExtensionVersion ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( pStdExtensionVersion == rhs.pStdExtensionVersion ); +# endif + } + + bool operator!=( VideoEncodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoEXT; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT ) == + sizeof( VkVideoEncodeH265SessionCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265SessionParametersAddInfoEXT + { + using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH265SessionParametersAddInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( + uint32_t vpsStdCount_ = {}, + const StdVideoH265VideoParameterSet * pVpsStd_ = {}, + uint32_t spsStdCount_ = {}, + const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, + uint32_t ppsStdCount_ = {}, + const StdVideoH265PictureParameterSet * pPpsStd_ = {} ) VULKAN_HPP_NOEXCEPT + : vpsStdCount( vpsStdCount_ ) + , pVpsStd( pVpsStd_ ) + , spsStdCount( spsStdCount_ ) + , pSpsStd( pSpsStd_ ) + , ppsStdCount( ppsStdCount_ ) + , pPpsStd( pPpsStd_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( + VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersAddInfoEXT( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersAddInfoEXT( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vpsStd_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {} ) + : vpsStdCount( static_cast( vpsStd_.size() ) ) + , pVpsStd( vpsStd_.data() ) + , spsStdCount( static_cast( spsStd_.size() ) ) + , pSpsStd( spsStd_.data() ) + , ppsStdCount( static_cast( ppsStd_.size() ) ) + , pPpsStd( ppsStd_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersAddInfoEXT & + operator=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersAddInfoEXT & + operator=( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setVpsStdCount( uint32_t vpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + vpsStdCount = vpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setPVpsStd( const StdVideoH265VideoParameterSet * 
pVpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pVpsStd = pVpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoEXT & + setVpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vpsStd_ ) + VULKAN_HPP_NOEXCEPT + { + vpsStdCount = static_cast( vpsStd_.size() ); + pVpsStd = vpsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + spsStdCount = spsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pSpsStd = pSpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoEXT & + setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) + VULKAN_HPP_NOEXCEPT + { + spsStdCount = static_cast( spsStd_.size() ); + pSpsStd = spsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + ppsStdCount = ppsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & + setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + { + pPpsStd = pPpsStd_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoEXT & + setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) + VULKAN_HPP_NOEXCEPT + { + ppsStdCount = static_cast( ppsStd_.size() ); + pPpsStd = ppsStd_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersAddInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsStdCount == rhs.vpsStdCount ) && + ( pVpsStd == rhs.pVpsStd ) && ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) && + ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT; + const void * pNext = {}; + uint32_t vpsStdCount = {}; + const StdVideoH265VideoParameterSet * pVpsStd = {}; + uint32_t spsStdCount = {}; + 
const StdVideoH265SequenceParameterSet * pSpsStd = {}; + uint32_t ppsStdCount = {}; + const StdVideoH265PictureParameterSet * pPpsStd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) == + sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersAddInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265SessionParametersCreateInfoEXT + { + using NativeType = VkVideoEncodeH265SessionParametersCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT( + uint32_t maxVpsStdCount_ = {}, + uint32_t maxSpsStdCount_ = {}, + uint32_t maxPpsStdCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} ) + VULKAN_HPP_NOEXCEPT + : maxVpsStdCount( maxVpsStdCount_ ) + , maxSpsStdCount( maxSpsStdCount_ ) + , maxPpsStdCount( maxPpsStdCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT( + VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersCreateInfoEXT( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersCreateInfoEXT( + *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersCreateInfoEXT & + operator=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersCreateInfoEXT & + operator=( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & + setMaxVpsStdCount( uint32_t maxVpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxVpsStdCount = maxVpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & + setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSpsStdCount = maxSpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & + setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPpsStdCount = maxPpsStdCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +# endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersCreateInfoEXT const & ) const = default; +# else + bool operator==( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVpsStdCount == rhs.maxVpsStdCount ) && + ( maxSpsStdCount == rhs.maxSpsStdCount ) && ( maxPpsStdCount == rhs.maxPpsStdCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT; + const void * pNext = {}; + uint32_t maxVpsStdCount = {}; + uint32_t maxSpsStdCount = {}; + uint32_t maxPpsStdCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) == + sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersCreateInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeH265VclFrameInfoEXT + { + using NativeType = VkVideoEncodeH265VclFrameInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265VclFrameInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ = {}, + uint32_t naluSliceEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceEXT * pNaluSliceEntries_ = {}, + const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : pReferenceFinalLists( pReferenceFinalLists_ ) + , naluSliceEntryCount( naluSliceEntryCount_ ) + , pNaluSliceEntries( pNaluSliceEntries_ ) + , pCurrentPictureInfo( pCurrentPictureInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265VclFrameInfoEXT( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265VclFrameInfoEXT( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265VclFrameInfoEXT( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265VclFrameInfoEXT( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + naluSliceEntries_, + const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {} ) + : pReferenceFinalLists( pReferenceFinalLists_ ) + , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) + , pNaluSliceEntries( naluSliceEntries_.data() ) + , pCurrentPictureInfo( pCurrentPictureInfo_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265VclFrameInfoEXT & + operator=( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265VclFrameInfoEXT & operator=( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPReferenceFinalLists( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceFinalLists = pReferenceFinalLists_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & + setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = naluSliceEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNaluSliceEntries( + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceEntries = pNaluSliceEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265VclFrameInfoEXT & setNaluSliceEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); + pNaluSliceEntries = naluSliceEntries_.data(); + 
return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &
+      setPCurrentPictureInfo( const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCurrentPictureInfo = pCurrentPictureInfo_;
+      return *this;
+    }
+#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265VclFrameInfoEXT *>( this );
+    }
+
+    explicit operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265VclFrameInfoEXT *>( this );
+    }
+
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceEXT * const &,
+               const StdVideoEncodeH265PictureInfo * const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie(
+        sType, pNext, pReferenceFinalLists, naluSliceEntryCount, pNaluSliceEntries, pCurrentPictureInfo );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH265VclFrameInfoEXT const & ) const = default;
+#  else
+    bool operator==( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) &&
+             ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) &&
+             ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                            sType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
+    const void *                                                   pNext = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists = {};
+    uint32_t                                                       naluSliceEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceEXT *      pNaluSliceEntries = {};
+    const StdVideoEncodeH265PictureInfo *                          pCurrentPictureInfo = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT ) ==
+                              sizeof( VkVideoEncodeH265VclFrameInfoEXT ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT(
+    std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value,
+    "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = VideoEncodeH265VclFrameInfoEXT; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeInfoKHR + { + using NativeType = VkVideoEncodeInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, + uint32_t qualityLevel_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {}, + uint32_t precedingExternallyEncodedBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , qualityLevel( qualityLevel_ ) + , codedExtent( codedExtent_ ) + , dstBitstreamBuffer( dstBitstreamBuffer_ ) + , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) + , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, + uint32_t qualityLevel_, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_, + uint32_t precedingExternallyEncodedBytes_ = {} ) + : flags( flags_ ) + , qualityLevel( qualityLevel_ ) + , codedExtent( codedExtent_ ) + , dstBitstreamBuffer( dstBitstreamBuffer_ ) + , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) + , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevel = qualityLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBuffer = dstBitstreamBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBufferOffset = dstBitstreamBufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT + { + dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource( + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + srcPictureResource = srcPictureResource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot( + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT + { + precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + qualityLevel, + codedExtent, + dstBitstreamBuffer, + dstBitstreamBufferOffset, + dstBitstreamBufferMaxRange, + srcPictureResource, + pSetupReferenceSlot, + referenceSlotCount, + pReferenceSlots, + 
precedingExternallyEncodedBytes ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeInfoKHR const & ) const = default; +# else + bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( qualityLevel == rhs.qualityLevel ) && ( codedExtent == rhs.codedExtent ) && + ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) && + ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) && + ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) && + ( srcPictureResource == rhs.srcPictureResource ) && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && + ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots ) && + ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes ); +# endif + } + + bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; + uint32_t qualityLevel = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {}; + uint32_t precedingExternallyEncodedBytes = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeRateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeRateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoEncodeRateControlLayerInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeRateControlLayerInfoKHR( uint32_t averageBitrate_ = {}, + uint32_t maxBitrate_ = {}, + uint32_t frameRateNumerator_ = {}, + uint32_t frameRateDenominator_ = {}, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {} ) VULKAN_HPP_NOEXCEPT + : averageBitrate( averageBitrate_ ) + , maxBitrate( maxBitrate_ ) + , frameRateNumerator( frameRateNumerator_ ) + , frameRateDenominator( frameRateDenominator_ ) + , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) + , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeRateControlLayerInfoKHR & + operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlLayerInfoKHR & + operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & + setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT + { + averageBitrate = averageBitrate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & + setMaxBitrate( uint32_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT + { + maxBitrate = maxBitrate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & + setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateNumerator = frameRateNumerator_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & + setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateDenominator = frameRateDenominator_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & + setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + virtualBufferSizeInMs = virtualBufferSizeInMs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & + setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); 
+ } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + averageBitrate, + maxBitrate, + frameRateNumerator, + frameRateDenominator, + virtualBufferSizeInMs, + initialVirtualBufferSizeInMs ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default; +# else + bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) && + ( maxBitrate == rhs.maxBitrate ) && ( frameRateNumerator == rhs.frameRateNumerator ) && + ( frameRateDenominator == rhs.frameRateDenominator ) && + ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) && + ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs ); +# endif + } + + bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR; + const void * pNext = {}; + uint32_t averageBitrate = {}; + uint32_t maxBitrate = {}; + uint32_t frameRateNumerator = {}; + uint32_t frameRateDenominator = {}; + uint32_t virtualBufferSizeInMs = {}; + uint32_t initialVirtualBufferSizeInMs = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == + sizeof( VkVideoEncodeRateControlLayerInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoEncodeRateControlLayerInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeRateControlInfoKHR + { + using NativeType = VkVideoEncodeRateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone, + uint8_t layerCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rateControlMode( rateControlMode_ ) + , layerCount( layerCount_ ) + , pLayerConfigs( pLayerConfigs_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeRateControlInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ ) + : flags( flags_ ) + , rateControlMode( rateControlMode_ ) + , layerCount( static_cast( layerConfigs_.size() ) ) + , pLayerConfigs( layerConfigs_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeRateControlInfoKHR & + operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT + { + rateControlMode = rateControlMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint8_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayerConfigs( + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ ) VULKAN_HPP_NOEXCEPT + { + pLayerConfigs = pLayerConfigs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR & setLayerConfigs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries< + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ ) 
VULKAN_HPP_NOEXCEPT + { + layerCount = static_cast( layerConfigs_.size() ); + pLayerConfigs = layerConfigs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayerConfigs ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default; +# else + bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( rateControlMode == rhs.rateControlMode ) && ( layerCount == rhs.layerCount ) && + ( pLayerConfigs == rhs.pLayerConfigs ); +# endif + } + + bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone; + uint8_t layerCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == + sizeof( VkVideoEncodeRateControlInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" 
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
+  {
+    using Type = VideoEncodeRateControlInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoEndCodingInfoKHR
+  {
+    using NativeType = VkVideoEndCodingInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR &
+      setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    explicit operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
+    }
+
+    explicit operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
+    }
+
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
+#  else
+    bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#    if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#    else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+#    endif
+    }
+
+    bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eVideoEndCodingInfoKHR;
+    const void *                                 pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
+                            "VideoEndCodingInfoKHR is not nothrow_move_constructible!"
);
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
+  {
+    using Type = VideoEndCodingInfoKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct VideoFormatPropertiesKHR
+  {
+    using NativeType = VkVideoFormatPropertiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR(
+      VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
+    {}
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    explicit operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
+    }
+
+    explicit operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoFormatPropertiesKHR *>( this );
+    }
+
+#  if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Format const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
+#  else
+    bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#    if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION )
+      return this->reflect() == rhs.reflect();
+#    else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format );
+#    endif
+    }
+
+    bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eVideoFormatPropertiesKHR;
+    void *                              pNext  = {};
+    VULKAN_HPP_NAMESPACE::Format        format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+  };
+  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) ==
+                              sizeof( VkVideoFormatPropertiesKHR ),
+                            "struct and wrapper have different size!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
+                            "struct wrapper is not a standard layout!" );
+  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
+                            "VideoFormatPropertiesKHR is not nothrow_move_constructible!"
); + + template <> + struct CppType + { + using Type = VideoFormatPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoGetMemoryPropertiesKHR + { + using NativeType = VkVideoGetMemoryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR( + uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryBindIndex( memoryBindIndex_ ) + , pMemoryRequirements( pMemoryRequirements_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoGetMemoryPropertiesKHR( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoGetMemoryPropertiesKHR( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoGetMemoryPropertiesKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoGetMemoryPropertiesKHR & operator=( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoGetMemoryPropertiesKHR & operator=( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & + setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryBindIndex = memoryBindIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & + setPMemoryRequirements( VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ ) VULKAN_HPP_NOEXCEPT + { + pMemoryRequirements = pMemoryRequirements_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryBindIndex, pMemoryRequirements ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoGetMemoryPropertiesKHR const & ) const = default; +# else + bool operator==( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && + ( pMemoryRequirements == rhs.pMemoryRequirements ); +# endif + } + + bool operator!=( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoGetMemoryPropertiesKHR; + const void * pNext = {}; + uint32_t memoryBindIndex = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR ) == + sizeof( 
VkVideoGetMemoryPropertiesKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoGetMemoryPropertiesKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoGetMemoryPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoQueueFamilyProperties2KHR + { + using NativeType = VkVideoQueueFamilyProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR( + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {} ) VULKAN_HPP_NOEXCEPT + : videoCodecOperations( videoCodecOperations_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoQueueFamilyProperties2KHR( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoQueueFamilyProperties2KHR( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoQueueFamilyProperties2KHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoQueueFamilyProperties2KHR & + operator=( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoQueueFamilyProperties2KHR & operator=( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setVideoCodecOperations( + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT + { + videoCodecOperations = videoCodecOperations_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoCodecOperations ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoQueueFamilyProperties2KHR const & ) const = default; +# else + bool operator==( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations ); +# endif + } + + bool operator!=( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoQueueFamilyProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( 
VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR ) == + sizeof( VkVideoQueueFamilyProperties2KHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoQueueFamilyProperties2KHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoQueueFamilyProperties2KHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionCreateInfoKHR + { + using NativeType = VkVideoSessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR( + uint32_t queueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ = {}, + VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxReferencePicturesSlotsCount_ = {}, + uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFamilyIndex( queueFamilyIndex_ ) + , flags( flags_ ) + , pVideoProfile( pVideoProfile_ ) + , pictureFormat( pictureFormat_ ) + , maxCodedExtent( maxCodedExtent_ ) + , referencePicturesFormat( referencePicturesFormat_ ) + , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) + , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) + {} + + VULKAN_HPP_CONSTEXPR + VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT + { + pVideoProfile = pVideoProfile_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT + { + pictureFormat = pictureFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & 
maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT + { + maxCodedExtent = maxCodedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT + { + referencePicturesFormat = referencePicturesFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT + { + maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & + setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT + { + maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + queueFamilyIndex, + flags, + pVideoProfile, + pictureFormat, + maxCodedExtent, + referencePicturesFormat, + maxReferencePicturesSlotsCount, + maxReferencePicturesActiveCount ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( flags == rhs.flags ) && ( pVideoProfile == rhs.pVideoProfile ) && + ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) && + ( referencePicturesFormat == rhs.referencePicturesFormat ) && + ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && + ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ); +# endif + } + + bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile = {}; + VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxReferencePicturesSlotsCount = {}; + uint32_t maxReferencePicturesActiveCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == + sizeof( VkVideoSessionCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = VideoSessionCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionParametersCreateInfoKHR + { + using NativeType = VkVideoSessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoSessionParametersCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {} ) VULKAN_HPP_NOEXCEPT + : videoSessionParametersTemplate( videoSessionParametersTemplate_ ) + , videoSession( videoSession_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoSessionParametersCreateInfoKHR & + operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersCreateInfoKHR & + operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate( + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParametersTemplate = videoSessionParametersTemplate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & + setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoSessionParametersTemplate, videoSession ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && + ( videoSession == rhs.videoSession ); +# endif + } + + bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == + sizeof( VkVideoSessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoSessionParametersCreateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoSessionParametersUpdateInfoKHR + { + using NativeType = VkVideoSessionParametersUpdateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eVideoSessionParametersUpdateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {} ) VULKAN_HPP_NOEXCEPT + : updateSequenceCount( updateSequenceCount_ ) + {} + + VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoSessionParametersUpdateInfoKHR & + operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoSessionParametersUpdateInfoKHR & + operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & + setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT + { + updateSequenceCount = updateSequenceCount_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, updateSequenceCount ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default; +# else + bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount ); +# endif + 
} + + bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR; + const void * pNext = {}; + uint32_t updateSequenceCount = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == + sizeof( VkVideoSessionParametersUpdateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = VideoSessionParametersUpdateInfoKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + struct WaylandSurfaceCreateInfoKHR + { + using NativeType = VkWaylandSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, + struct wl_display * display_ = {}, + struct wl_surface * surface_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , display( display_ ) + , surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR + WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : WaylandSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT + { + display = display_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, display, surface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) 
const = default; +# else + bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && + ( surface == rhs.surface ); +# endif + } + + bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; + struct wl_display * display = {}; + struct wl_surface * surface = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == + sizeof( VkWaylandSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = WaylandSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct Win32KeyedMutexAcquireReleaseInfoKHR + { + using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeouts_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeouts( pAcquireTimeouts_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + {} + + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + releaseSyncs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {} ) + : acquireCount( static_cast( acquireSyncs_.size() ) ) + , pAcquireSyncs( acquireSyncs_.data() ) + , pAcquireKeys( acquireKeys_.data() ) + , pAcquireTimeouts( acquireTimeouts_.data() ) + , releaseCount( static_cast( releaseSyncs_.size() ) ) + , pReleaseSyncs( releaseSyncs_.data() ) + , pReleaseKeys( releaseKeys_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + 
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() ); +# else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeouts_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeouts_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +# else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Win32KeyedMutexAcquireReleaseInfoKHR & + operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoKHR & + operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = acquireCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireTimeouts = pAcquireTimeouts_; + return 
*this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeouts_.size() ); + pAcquireTimeouts = acquireTimeouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = releaseCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & + setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + acquireCount, + pAcquireSyncs, + pAcquireKeys, + pAcquireTimeouts, + releaseCount, + pReleaseSyncs, + pReleaseKeys ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && + ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) && + ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) && + ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys ); +# endif + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeouts = 
{}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == + sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct Win32KeyedMutexAcquireReleaseInfoNV + { + using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, + const uint64_t * pAcquireKeys_ = {}, + const uint32_t * pAcquireTimeoutMilliseconds_ = {}, + uint32_t releaseCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, + const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + {} + + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + releaseSyncs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ = {} ) + : acquireCount( static_cast( acquireSyncs_.size() ) ) + , pAcquireSyncs( acquireSyncs_.data() ) + , pAcquireKeys( acquireKeys_.data() ) + , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ) + , releaseCount( static_cast( releaseSyncs_.size() ) ) + , pReleaseSyncs( releaseSyncs_.data() ) + , pReleaseKeys( releaseKeys_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() ); + VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() ); + VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() ); +# else + if ( acquireSyncs_.size() != acquireKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" ); + } + if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw 
LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" ); + } + if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() ); +# else + if ( releaseSyncs_.size() != releaseKeys_.size() ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Win32KeyedMutexAcquireReleaseInfoNV & + operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32KeyedMutexAcquireReleaseInfoNV & + operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = acquireCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireSyncs_.size() ); + pAcquireSyncs = acquireSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireKeys_.size() ); + pAcquireKeys = acquireKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + { + pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & acquireTimeoutMilliseconds_ ) + VULKAN_HPP_NOEXCEPT + { + acquireCount = static_cast( acquireTimeoutMilliseconds_.size() ); + pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
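    // Illustrative usage sketch (not part of the generated header): how the enhanced-mode
    // keyed-mutex constructor above is typically used. The acquire arrays are parallel
    // (one key and one timeout per memory object) and must have matching sizes, otherwise
    // the constructor asserts or throws vk::LogicError as shown above. `queue`,
    // `commandBuffer` and `sharedMemory` are assumed placeholder handles created elsewhere;
    // `vk` is the default VULKAN_HPP_NAMESPACE and std::array comes from <array>.
    static void submitWithKeyedMutex( vk::Queue queue, vk::CommandBuffer commandBuffer,
                                      vk::DeviceMemory sharedMemory )
    {
      std::array<vk::DeviceMemory, 1> acquireSyncs      = { sharedMemory };
      std::array<uint64_t, 1>         acquireKeys       = { 1 };     // arbitrary example key
      std::array<uint32_t, 1>         acquireTimeoutsMs = { 1000 };  // arbitrary timeout
      std::array<vk::DeviceMemory, 1> releaseSyncs      = { sharedMemory };
      std::array<uint64_t, 1>         releaseKeys       = { 2 };     // arbitrary example key

      // The ArrayProxy constructor fills acquireCount/releaseCount from the array sizes.
      vk::Win32KeyedMutexAcquireReleaseInfoNV keyedMutexInfo(
          acquireSyncs, acquireKeys, acquireTimeoutsMs, releaseSyncs, releaseKeys );

      // The structure is consumed by chaining it into the submission's pNext chain.
      vk::SubmitInfo submitInfo;
      submitInfo.setCommandBuffers( commandBuffer ).setPNext( &keyedMutexInfo );

      queue.submit( submitInfo );
    }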
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = releaseCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseSyncs_ ) + VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseSyncs_.size() ); + pReleaseSyncs = releaseSyncs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & + setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT + { + releaseCount = static_cast( releaseKeys_.size() ); + pReleaseKeys = releaseKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + acquireCount, + pAcquireSyncs, + pAcquireKeys, + pAcquireTimeoutMilliseconds, + releaseCount, + pReleaseSyncs, + pReleaseKeys ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default; +# else + bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && + ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) && + ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) && + ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && + ( pReleaseKeys == rhs.pReleaseKeys ); +# endif + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + const void * pNext = {}; + uint32_t acquireCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {}; + const uint64_t * pAcquireKeys = {}; + const uint32_t * pAcquireTimeoutMilliseconds = {}; + uint32_t releaseCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {}; + const uint64_t * pReleaseKeys = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == + sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), + "struct and wrapper have different size!" 
); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = Win32KeyedMutexAcquireReleaseInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct Win32SurfaceCreateInfoKHR + { + using NativeType = VkWin32SurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, + HINSTANCE hinstance_ = {}, + HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , hinstance( hinstance_ ) + , hwnd( hwnd_ ) + {} + + VULKAN_HPP_CONSTEXPR + Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : Win32SurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT + { + hinstance = hinstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT + { + hwnd = hwnd_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, hinstance, hwnd ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd ); +# endif + } + + bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; + const void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; + HINSTANCE hinstance = {}; + HWND hwnd = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == + sizeof( VkWin32SurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = Win32SurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct WriteDescriptorSet + { + using NativeType = VkWriteDescriptorSet; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , pImageInfo( pImageInfo_ ) + , pBufferInfo( pBufferInfo_ ) + , pTexelBufferView( pTexelBufferView_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSet( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet( + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, + uint32_t dstBinding_, + uint32_t dstArrayElement_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferInfo_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + texelBufferView_ = {} ) + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() + : !bufferInfo_.empty() ? 
bufferInfo_.size() + : texelBufferView_.size() ) ) + , descriptorType( descriptorType_ ) + , pImageInfo( imageInfo_.data() ) + , pBufferInfo( bufferInfo_.data() ) + , pTexelBufferView( texelBufferView_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); +# else + if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setImageInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setBufferInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & + setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & setTexelBufferView( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + dstSet, + dstBinding, + dstArrayElement, + descriptorCount, + descriptorType, + pImageInfo, + pBufferInfo, + pTexelBufferView ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSet const & ) const = default; +#else + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && + ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && + ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && + ( pTexelBufferView == rhs.pTexelBufferView ); +# endif + } + + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSet is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = WriteDescriptorSet; + }; + + struct WriteDescriptorSetAccelerationStructureKHR + { + using NativeType = VkWriteDescriptorSetAccelerationStructureKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWriteDescriptorSetAccelerationStructureKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( + uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructureCount( accelerationStructureCount_ ) + , pAccelerationStructures( pAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( + WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) + VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetAccelerationStructureKHR( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) + : accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteDescriptorSetAccelerationStructureKHR & + operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureKHR & + operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); +# endif + } + + bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; + const void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == + sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureKHR; + }; + + struct WriteDescriptorSetAccelerationStructureNV + { + using NativeType = VkWriteDescriptorSetAccelerationStructureNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWriteDescriptorSetAccelerationStructureNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( + uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructureCount( accelerationStructureCount_ ) + , pAccelerationStructures( pAccelerationStructures_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( + WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetAccelerationStructureNV( + *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) + : accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteDescriptorSetAccelerationStructureNV & + operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetAccelerationStructureNV & + operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + 
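    // Illustrative usage sketch (not part of the generated header): how the
    // acceleration-structure descriptor structs above are consumed. The KHR (or NV)
    // struct carries the actual handles and is chained through WriteDescriptorSet::pNext;
    // WriteDescriptorSet::descriptorCount must match accelerationStructureCount, and the
    // pImageInfo/pBufferInfo/pTexelBufferView members stay null for this descriptor type.
    // `device`, `descriptorSet` and `tlas` are assumed placeholder handles created
    // elsewhere; `vk` is the default VULKAN_HPP_NAMESPACE.
    static void writeTlasDescriptor( vk::Device device, vk::DescriptorSet descriptorSet,
                                     vk::AccelerationStructureKHR tlas )
    {
      // One acceleration structure, passed via count + pointer (the ArrayProxy
      // constructor/setters above would fill these two fields the same way).
      vk::WriteDescriptorSetAccelerationStructureKHR asInfo( 1, &tlas );

      vk::WriteDescriptorSet write;
      write.setDstSet( descriptorSet )
          .setDstBinding( 0 )
          .setDescriptorCount( 1 )  // must equal asInfo.accelerationStructureCount
          .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR )
          .setPNext( &asInfo );

      device.updateDescriptorSets( write, nullptr );
    }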
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & + setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & + accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default; +#else + bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); +# endif + } + + bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; + const void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == + sizeof( VkWriteDescriptorSetAccelerationStructureNV ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" 
); + + template <> + struct CppType + { + using Type = WriteDescriptorSetAccelerationStructureNV; + }; + + struct WriteDescriptorSetInlineUniformBlock + { + using NativeType = VkWriteDescriptorSetInlineUniformBlock; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = + StructureType::eWriteDescriptorSetInlineUniformBlock; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize( dataSize_ ) + , pData( pData_ ) + {} + + VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetInlineUniformBlock( *reinterpret_cast( &rhs ) ) + {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) + : dataSize( static_cast( data_.size() * sizeof( T ) ) ), pData( data_.data() ) + {} +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteDescriptorSetInlineUniformBlock & + operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetInlineUniformBlock & + operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + WriteDescriptorSetInlineUniformBlock & + setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = static_cast( data_.size() * sizeof( T ) ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default; +#else + bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool 
operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock; + const void * pNext = {}; + uint32_t dataSize = {}; + const void * pData = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == + sizeof( VkWriteDescriptorSetInlineUniformBlock ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( + std::is_nothrow_move_constructible::value, + "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = WriteDescriptorSetInlineUniformBlock; + }; + using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + struct XcbSurfaceCreateInfoKHR + { + using NativeType = VkXcbSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, + xcb_connection_t * connection_ = {}, + xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , connection( connection_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : XcbSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & + setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT + { + connection = connection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, connection, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = connection <=> rhs.connection; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( connection == rhs.connection ) && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 ); + } + + bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; + xcb_connection_t * connection = {}; + xcb_window_t window = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == + sizeof( VkXcbSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = XcbSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + struct XlibSurfaceCreateInfoKHR + { + using NativeType = VkXlibSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, + Display * dpy_ = {}, + Window window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , dpy( dpy_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : XlibSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + {} +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT + { + dpy = dpy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return 
*this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + explicit operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + explicit operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if !defined( __GNUC__ ) || ( 70500 < GCC_VERSION ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dpy, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) && + ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 ); + } + + bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; + Display * dpy = {}; + Window window = {}; + }; + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == + sizeof( VkXlibSurfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); + VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" ); + + template <> + struct CppType + { + using Type = XlibSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/Externals/Vulkan/Include/vulkan/vulkan_vi.h b/Externals/Vulkan/Include/vulkan/vulkan_vi.h index 6fb66f9dd284..0355e7a162ca 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_vi.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_vi.h @@ -2,19 +2,9 @@ #define VULKAN_VI_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
+** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/Externals/Vulkan/Include/vulkan/vulkan_wayland.h b/Externals/Vulkan/Include/vulkan/vulkan_wayland.h index 599d05b24a53..9afd0b76d5f1 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_wayland.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_wayland.h @@ -2,19 +2,9 @@ #define VULKAN_WAYLAND_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/Externals/Vulkan/Include/vulkan/vulkan_win32.h b/Externals/Vulkan/Include/vulkan/vulkan_win32.h index 20a1dc0e5883..affe0c02aefb 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_win32.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_win32.h @@ -2,19 +2,9 @@ #define VULKAN_WIN32_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* @@ -272,9 +262,6 @@ typedef enum VkFullScreenExclusiveEXT { VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1, VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2, VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3, - VK_FULL_SCREEN_EXCLUSIVE_BEGIN_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, - VK_FULL_SCREEN_EXCLUSIVE_END_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, - VK_FULL_SCREEN_EXCLUSIVE_RANGE_SIZE_EXT = (VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT - VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT + 1), VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF } VkFullScreenExclusiveEXT; typedef struct VkSurfaceFullScreenExclusiveInfoEXT { diff --git a/Externals/Vulkan/Include/vulkan/vulkan_xcb.h b/Externals/Vulkan/Include/vulkan/vulkan_xcb.h index 4cc0bc0cec5b..68e61b88f0dc 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_xcb.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_xcb.h @@ -2,19 +2,9 @@ #define VULKAN_XCB_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. 
-** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/Externals/Vulkan/Include/vulkan/vulkan_xlib.h b/Externals/Vulkan/Include/vulkan/vulkan_xlib.h index ee2b48accb0a..ea5360ab647b 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_xlib.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_xlib.h @@ -2,19 +2,9 @@ #define VULKAN_XLIB_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/Externals/Vulkan/Include/vulkan/vulkan_xlib_xrandr.h b/Externals/Vulkan/Include/vulkan/vulkan_xlib_xrandr.h index 08c4fd729cd2..8fc35cfc56e4 100644 --- a/Externals/Vulkan/Include/vulkan/vulkan_xlib_xrandr.h +++ b/Externals/Vulkan/Include/vulkan/vulkan_xlib_xrandr.h @@ -2,19 +2,9 @@ #define VULKAN_XLIB_XRANDR_H_ 1 /* -** Copyright (c) 2015-2019 The Khronos Group Inc. +** Copyright 2015-2022 The Khronos Group Inc. ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. +** SPDX-License-Identifier: Apache-2.0 */ /* diff --git a/Source/Core/VideoBackends/Vulkan/VKSwapChain.h b/Source/Core/VideoBackends/Vulkan/VKSwapChain.h index 0077a93ec1e0..d83ee570517f 100644 --- a/Source/Core/VideoBackends/Vulkan/VKSwapChain.h +++ b/Source/Core/VideoBackends/Vulkan/VKSwapChain.h @@ -94,7 +94,7 @@ class SwapChain WindowSystemInfo m_wsi; VkSurfaceKHR m_surface = VK_NULL_HANDLE; VkSurfaceFormatKHR m_surface_format = {}; - VkPresentModeKHR m_present_mode = VK_PRESENT_MODE_RANGE_SIZE_KHR; + VkPresentModeKHR m_present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR; AbstractTextureFormat m_texture_format = AbstractTextureFormat::Undefined; bool m_vsync_enabled = false; bool m_fullscreen_supported = false; From 07f2587e633f4b50a2cabd2a65f2ded8aeac5f1e Mon Sep 17 00:00:00 2001 From: JosJuice Date: Sat, 29 Jan 2022 11:14:09 +0100 Subject: [PATCH 006/237] Android: Get rid of the boot timeout We don't have a timeout like this on other platforms, and it doesn't accomplish anything useful as far as I can tell. 
If you trigger it, all that happens is that you don't get a working game and also can't press Exit Emulation without Dolphin hanging (stuck in Core::Shutdown). --- Source/Android/jni/MainAndroid.cpp | 34 ++++++------------------------ 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/Source/Android/jni/MainAndroid.cpp b/Source/Android/jni/MainAndroid.cpp index b533ba74d641..e7e4d8446719 100644 --- a/Source/Android/jni/MainAndroid.cpp +++ b/Source/Android/jni/MainAndroid.cpp @@ -84,7 +84,6 @@ std::mutex s_surface_lock; bool s_need_nonblocking_alert_msg; Common::Flag s_is_booting; -bool s_have_wm_user_stop = false; bool s_game_metadata_is_valid = false; } // Anonymous namespace @@ -123,7 +122,6 @@ void Host_Message(HostMessageID id) } else if (id == HostMessageID::WMUserStop) { - s_have_wm_user_stop = true; if (Core::IsRunning()) Core::QueueHostJob(&Core::Stop); } @@ -554,8 +552,6 @@ static void Run(JNIEnv* env, std::unique_ptr&& boot, bool riivol WiimoteReal::InitAdapterClass(); - s_have_wm_user_stop = false; - if (riivolution && std::holds_alternative(boot->parameters)) { const std::string& riivolution_dir = File::GetUserPath(D_RIIVOLUTION_IDX); @@ -572,41 +568,25 @@ static void Run(JNIEnv* env, std::unique_ptr&& boot, bool riivol s_need_nonblocking_alert_msg = true; std::unique_lock surface_guard(s_surface_lock); - bool successful_boot = BootManager::BootCore(std::move(boot), wsi); - if (successful_boot) + if (BootManager::BootCore(std::move(boot), wsi)) { ButtonManager::Init(SConfig::GetInstance().GetGameID()); - static constexpr int TIMEOUT = 10000; static constexpr int WAIT_STEP = 25; - int time_waited = 0; - // A Core::CORE_ERROR state would be helpful here. - while (!Core::IsRunningAndStarted()) - { - if (time_waited >= TIMEOUT || s_have_wm_user_stop) - { - successful_boot = false; - break; - } - + while (Core::GetState() == Core::State::Starting) std::this_thread::sleep_for(std::chrono::milliseconds(WAIT_STEP)); - time_waited += WAIT_STEP; - } } s_is_booting.Clear(); s_need_nonblocking_alert_msg = false; surface_guard.unlock(); - if (successful_boot) + while (Core::IsRunning()) { - while (Core::IsRunningAndStarted()) - { - host_identity_guard.unlock(); - s_update_main_frame_event.Wait(); - host_identity_guard.lock(); - Core::HostDispatchJobs(); - } + host_identity_guard.unlock(); + s_update_main_frame_event.Wait(); + host_identity_guard.lock(); + Core::HostDispatchJobs(); } s_game_metadata_is_valid = false; From c5c011dd12cfe3c91947c915d0e7ce73aa831cbb Mon Sep 17 00:00:00 2001 From: JosJuice Date: Sat, 29 Jan 2022 14:36:28 +0100 Subject: [PATCH 007/237] Android: Fix swapped texture dumping description strings --- Source/Android/app/src/main/res/values/strings.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Source/Android/app/src/main/res/values/strings.xml b/Source/Android/app/src/main/res/values/strings.xml index d5553ea25f20..64b943660d1a 100644 --- a/Source/Android/app/src/main/res/values/strings.xml +++ b/Source/Android/app/src/main/res/values/strings.xml @@ -316,9 +316,9 @@ Dump Textures Dumps decoded game textures based on the other flags to User/Dump/Textures/<game_id>/. If unsure, leave this unchecked. Dump Base Textures - Whether to dump mipmapped game textures to User/Dump/Textures/<game_id>/. + Whether to dump base game textures to User/Dump/Textures/<game_id>/. Dump Mip Maps - Whether to dump base game textures to User/Dump/Textures/<game_id>/. + Whether to dump mipmapped game textures to User/Dump/Textures/<game_id>/. 
Misc Crop Crops the picture from its native aspect ratio to 4:3 or 16:9. If unsure, leave this unchecked. From 7f32057e9184573d046bea4faef53dae42419943 Mon Sep 17 00:00:00 2001 From: JosJuice Date: Sat, 29 Jan 2022 15:48:34 +0100 Subject: [PATCH 008/237] Android: Call OnConfigChanged when resetting a setting Otherwise the value of the setting won't be updated properly. --- Source/Android/jni/Config/NativeConfig.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Source/Android/jni/Config/NativeConfig.cpp b/Source/Android/jni/Config/NativeConfig.cpp index bdeafc5f13d4..a84906aaecbc 100644 --- a/Source/Android/jni/Config/NativeConfig.cpp +++ b/Source/Android/jni/Config/NativeConfig.cpp @@ -147,7 +147,10 @@ Java_org_dolphinemu_dolphinemu_features_settings_model_NativeConfig_deleteKey( JNIEnv* env, jclass, jint layer, jstring file, jstring section, jstring key) { const Config::Location location = GetLocation(env, file, section, key); - return static_cast(GetLayer(layer, location)->DeleteKey(location)); + const bool had_value = GetLayer(layer, location)->DeleteKey(location); + if (had_value) + Config::OnConfigChanged(); + return static_cast(had_value); } JNIEXPORT jstring JNICALL From a9a163657ccfa888d2c4fcd6eb26ac5f0c994203 Mon Sep 17 00:00:00 2001 From: Jordan Woyak Date: Sun, 30 Jan 2022 13:16:51 -0600 Subject: [PATCH 009/237] VideoCommon/FrameDump: Build fix for libavformat major version 59 and newer. av_guess_format now returns a pointer to const. --- Source/Core/VideoCommon/FrameDump.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/Core/VideoCommon/FrameDump.cpp b/Source/Core/VideoCommon/FrameDump.cpp index 9a8e3b4e1f06..e137170309a9 100644 --- a/Source/Core/VideoCommon/FrameDump.cpp +++ b/Source/Core/VideoCommon/FrameDump.cpp @@ -154,7 +154,7 @@ bool FrameDump::CreateVideoFile() File::CreateFullPath(dump_path); - AVOutputFormat* const output_format = av_guess_format(format.c_str(), dump_path.c_str(), nullptr); + auto* const output_format = av_guess_format(format.c_str(), dump_path.c_str(), nullptr); if (!output_format) { ERROR_LOG_FMT(FRAMEDUMP, "Invalid format {}", format); From 942da3ce5f5167fadc791e848c535a811cde15c7 Mon Sep 17 00:00:00 2001 From: Simonx22 Date: Thu, 3 Feb 2022 11:05:36 -0500 Subject: [PATCH 010/237] Android: Optimize imports --- .../dolphinemu/activities/ConvertActivity.java | 4 ++-- .../dolphinemu/dolphinemu/dialogs/AlertMessage.java | 8 ++++---- .../features/cheats/ui/CheatDetailsFragment.java | 1 - .../features/cheats/ui/CheatItemViewHolder.java | 2 -- .../dolphinemu/features/cheats/ui/CheatViewHolder.java | 1 - .../dolphinemu/features/cheats/ui/CheatsAdapter.java | 1 - .../features/cheats/ui/HeaderViewHolder.java | 1 - .../riivolution/ui/RiivolutionBootActivity.java | 2 -- .../features/riivolution/ui/RiivolutionViewHolder.java | 1 - .../features/settings/model/view/FilePicker.java | 4 ++-- .../settings/model/view/LogCheckBoxSetting.java | 2 -- .../features/settings/model/view/SettingsItem.java | 1 - .../features/settings/ui/SettingsActivity.java | 1 - .../settings/ui/SettingsFragmentPresenter.java | 4 ---- .../ui/viewholder/CheckBoxSettingViewHolder.java | 3 --- .../features/settings/utils/SettingsFile.java | 1 - .../dolphinemu/fragments/ConvertFragment.java | 10 +++++----- .../dolphinemu/dolphinemu/ui/main/MainActivity.java | 4 ---- .../dolphinemu/dolphinemu/ui/main/MainPresenter.java | 3 +-- .../dolphinemu/ui/platform/PlatformGamesView.java | 3 --- .../dolphinemu/utils/AlertDialogItemsBuilder.java | 4 ++-- 
.../dolphinemu/dolphinemu/utils/ContentHandler.java | 3 +-- .../org/dolphinemu/dolphinemu/utils/NetworkHelper.java | 1 - Source/Android/app/src/main/res/menu/menu_settings.xml | 3 +-- 24 files changed, 18 insertions(+), 50 deletions(-) diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/activities/ConvertActivity.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/activities/ConvertActivity.java index 738eb033e894..4678a798e46b 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/activities/ConvertActivity.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/activities/ConvertActivity.java @@ -6,11 +6,11 @@ import android.content.Intent; import android.os.Bundle; +import androidx.appcompat.app.AppCompatActivity; + import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.fragments.ConvertFragment; -import androidx.appcompat.app.AppCompatActivity; - public class ConvertActivity extends AppCompatActivity { private static final String ARG_GAME_PATH = "game_path"; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/dialogs/AlertMessage.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/dialogs/AlertMessage.java index 4ca0cf293720..870d244033a1 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/dialogs/AlertMessage.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/dialogs/AlertMessage.java @@ -5,14 +5,14 @@ import android.app.Dialog; import android.os.Bundle; -import org.dolphinemu.dolphinemu.NativeLibrary; -import org.dolphinemu.dolphinemu.R; -import org.dolphinemu.dolphinemu.activities.EmulationActivity; - import androidx.annotation.NonNull; import androidx.appcompat.app.AlertDialog; import androidx.fragment.app.DialogFragment; +import org.dolphinemu.dolphinemu.NativeLibrary; +import org.dolphinemu.dolphinemu.R; +import org.dolphinemu.dolphinemu.activities.EmulationActivity; + public final class AlertMessage extends DialogFragment { private static boolean sAlertResult = false; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatDetailsFragment.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatDetailsFragment.java index 2ab6bb31b09c..de147c35b7cf 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatDetailsFragment.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatDetailsFragment.java @@ -15,7 +15,6 @@ import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; import androidx.fragment.app.Fragment; -import androidx.lifecycle.LiveData; import androidx.lifecycle.ViewModelProvider; import org.dolphinemu.dolphinemu.R; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatItemViewHolder.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatItemViewHolder.java index b4e38f4c1b55..681f3ae78546 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatItemViewHolder.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatItemViewHolder.java @@ -7,8 +7,6 @@ import androidx.annotation.NonNull; import androidx.recyclerview.widget.RecyclerView; -import org.dolphinemu.dolphinemu.features.cheats.model.CheatsViewModel; - public abstract class CheatItemViewHolder extends RecyclerView.ViewHolder { public 
CheatItemViewHolder(@NonNull View itemView) diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatViewHolder.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatViewHolder.java index 58e391fb0171..a37abd145aa3 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatViewHolder.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatViewHolder.java @@ -9,7 +9,6 @@ import androidx.annotation.NonNull; import androidx.lifecycle.ViewModelProvider; -import androidx.recyclerview.widget.RecyclerView.ViewHolder; import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.features.cheats.model.Cheat; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatsAdapter.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatsAdapter.java index 5994feab2a33..ec256cf6e2a6 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatsAdapter.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/CheatsAdapter.java @@ -7,7 +7,6 @@ import android.view.ViewGroup; import androidx.annotation.NonNull; -import androidx.lifecycle.LifecycleOwner; import androidx.recyclerview.widget.RecyclerView; import org.dolphinemu.dolphinemu.R; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/HeaderViewHolder.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/HeaderViewHolder.java index 57c0edebb0da..c48ec500ff5d 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/HeaderViewHolder.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/cheats/ui/HeaderViewHolder.java @@ -8,7 +8,6 @@ import androidx.annotation.NonNull; import org.dolphinemu.dolphinemu.R; -import org.dolphinemu.dolphinemu.features.cheats.model.CheatsViewModel; public class HeaderViewHolder extends CheatItemViewHolder { diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java index 26016a800fa1..c990f95dd746 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java @@ -8,7 +8,6 @@ import android.widget.Button; import android.widget.TextView; -import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -16,7 +15,6 @@ import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.activities.EmulationActivity; import org.dolphinemu.dolphinemu.features.riivolution.model.RiivolutionPatches; -import org.dolphinemu.dolphinemu.ui.DividerItemDecoration; import org.dolphinemu.dolphinemu.utils.DirectoryInitialization; public class RiivolutionBootActivity extends AppCompatActivity diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionViewHolder.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionViewHolder.java index 
e51a575262d1..a24f6c991dd4 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionViewHolder.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionViewHolder.java @@ -10,7 +10,6 @@ import android.widget.TextView; import androidx.annotation.NonNull; -import androidx.annotation.Nullable; import androidx.recyclerview.widget.RecyclerView; import org.dolphinemu.dolphinemu.R; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/FilePicker.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/FilePicker.java index 619345580320..d5d195158749 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/FilePicker.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/FilePicker.java @@ -4,12 +4,12 @@ import android.content.Context; +import androidx.annotation.Nullable; + import org.dolphinemu.dolphinemu.features.settings.model.AbstractSetting; import org.dolphinemu.dolphinemu.features.settings.model.AbstractStringSetting; import org.dolphinemu.dolphinemu.features.settings.model.Settings; -import androidx.annotation.Nullable; - public final class FilePicker extends SettingsItem { private AbstractStringSetting mSetting; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/LogCheckBoxSetting.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/LogCheckBoxSetting.java index bd7341336d64..ca47a92a48f9 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/LogCheckBoxSetting.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/LogCheckBoxSetting.java @@ -2,8 +2,6 @@ package org.dolphinemu.dolphinemu.features.settings.model.view; -import android.content.Context; - import org.dolphinemu.dolphinemu.features.settings.model.AdHocBooleanSetting; import org.dolphinemu.dolphinemu.features.settings.model.Settings; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/SettingsItem.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/SettingsItem.java index 47d54043ad35..fade63e26d26 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/SettingsItem.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/view/SettingsItem.java @@ -4,7 +4,6 @@ import android.content.Context; -import org.dolphinemu.dolphinemu.DolphinApplication; import org.dolphinemu.dolphinemu.NativeLibrary; import org.dolphinemu.dolphinemu.features.settings.model.AbstractSetting; import org.dolphinemu.dolphinemu.features.settings.model.Settings; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsActivity.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsActivity.java index 43a920c01982..3cc3e2f9107a 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsActivity.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsActivity.java @@ -10,7 +10,6 @@ import android.provider.Settings; import android.view.Menu; import android.view.MenuInflater; -import 
android.view.MenuItem; import android.widget.Toast; import androidx.annotation.NonNull; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java index ac667e23f072..b9c2c3f840ef 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java @@ -7,7 +7,6 @@ import android.os.Bundle; import android.text.TextUtils; -import org.dolphinemu.dolphinemu.DolphinApplication; import org.dolphinemu.dolphinemu.NativeLibrary; import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.activities.UserDataActivity; @@ -41,11 +40,8 @@ import org.dolphinemu.dolphinemu.features.settings.model.view.SubmenuSetting; import org.dolphinemu.dolphinemu.features.settings.utils.SettingsFile; import org.dolphinemu.dolphinemu.ui.main.MainPresenter; -import org.dolphinemu.dolphinemu.utils.DirectoryInitialization; import org.dolphinemu.dolphinemu.utils.EGLHelper; -import org.dolphinemu.dolphinemu.utils.Log; -import java.io.File; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/viewholder/CheckBoxSettingViewHolder.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/viewholder/CheckBoxSettingViewHolder.java index 6327569ceaf7..b4e9b15ea841 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/viewholder/CheckBoxSettingViewHolder.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/viewholder/CheckBoxSettingViewHolder.java @@ -2,7 +2,6 @@ package org.dolphinemu.dolphinemu.features.settings.ui.viewholder; -import android.text.TextUtils; import android.view.View; import android.widget.CheckBox; import android.widget.TextView; @@ -11,10 +10,8 @@ import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.features.settings.model.view.CheckBoxSetting; -import org.dolphinemu.dolphinemu.features.settings.model.view.LogCheckBoxSetting; import org.dolphinemu.dolphinemu.features.settings.model.view.SettingsItem; import org.dolphinemu.dolphinemu.features.settings.ui.SettingsAdapter; -import org.dolphinemu.dolphinemu.features.settings.ui.SettingsFragmentPresenter; public final class CheckBoxSettingViewHolder extends SettingViewHolder { diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java index 29db905bac3f..dfa78e6eb2ae 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java @@ -4,7 +4,6 @@ import androidx.annotation.NonNull; -import org.dolphinemu.dolphinemu.features.settings.model.Settings; import org.dolphinemu.dolphinemu.features.settings.ui.SettingsActivityView; import org.dolphinemu.dolphinemu.utils.BiMap; import org.dolphinemu.dolphinemu.utils.DirectoryInitialization; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/fragments/ConvertFragment.java 
b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/fragments/ConvertFragment.java index d283a390259f..180e100c42c5 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/fragments/ConvertFragment.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/fragments/ConvertFragment.java @@ -17,6 +17,11 @@ import android.widget.CheckBox; import android.widget.Spinner; +import androidx.annotation.NonNull; +import androidx.annotation.StringRes; +import androidx.appcompat.app.AlertDialog; +import androidx.fragment.app.Fragment; + import org.dolphinemu.dolphinemu.NativeLibrary; import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.model.GameFile; @@ -26,11 +31,6 @@ import java.io.File; import java.util.ArrayList; -import androidx.annotation.NonNull; -import androidx.annotation.StringRes; -import androidx.appcompat.app.AlertDialog; -import androidx.fragment.app.Fragment; - public class ConvertFragment extends Fragment implements View.OnClickListener { private static class SpinnerValue implements AdapterView.OnItemSelectedListener diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainActivity.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainActivity.java index 711f7d36def3..1224fa0397d4 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainActivity.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainActivity.java @@ -14,7 +14,6 @@ import androidx.annotation.Nullable; import androidx.appcompat.app.AppCompatActivity; import androidx.appcompat.widget.Toolbar; -import androidx.lifecycle.ViewModelProvider; import androidx.swiperefreshlayout.widget.SwipeRefreshLayout; import androidx.viewpager.widget.ViewPager; @@ -28,9 +27,6 @@ import org.dolphinemu.dolphinemu.features.settings.model.NativeConfig; import org.dolphinemu.dolphinemu.features.settings.ui.MenuTag; import org.dolphinemu.dolphinemu.features.settings.ui.SettingsActivity; -import org.dolphinemu.dolphinemu.features.sysupdate.ui.OnlineUpdateProgressBarDialogFragment; -import org.dolphinemu.dolphinemu.features.sysupdate.ui.SystemMenuNotInstalledDialogFragment; -import org.dolphinemu.dolphinemu.features.sysupdate.ui.SystemUpdateViewModel; import org.dolphinemu.dolphinemu.services.GameFileCacheManager; import org.dolphinemu.dolphinemu.ui.platform.Platform; import org.dolphinemu.dolphinemu.ui.platform.PlatformGamesView; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainPresenter.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainPresenter.java index 1923927af802..7ec06868f39f 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainPresenter.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/main/MainPresenter.java @@ -6,8 +6,8 @@ import android.content.Intent; import android.net.Uri; -import androidx.appcompat.app.AlertDialog; import androidx.activity.ComponentActivity; +import androidx.appcompat.app.AlertDialog; import androidx.fragment.app.FragmentActivity; import androidx.lifecycle.Observer; import androidx.lifecycle.ViewModelProvider; @@ -32,7 +32,6 @@ import java.util.Arrays; import java.util.concurrent.ExecutionException; -import java.util.function.Supplier; public final class MainPresenter { diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/platform/PlatformGamesView.java 
b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/platform/PlatformGamesView.java index 803e680615d1..a728408d7039 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/platform/PlatformGamesView.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/ui/platform/PlatformGamesView.java @@ -2,9 +2,6 @@ package org.dolphinemu.dolphinemu.ui.platform; -import androidx.annotation.Nullable; -import androidx.swiperefreshlayout.widget.SwipeRefreshLayout; - /** * Abstraction for a screen representing a single platform's games. */ diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/AlertDialogItemsBuilder.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/AlertDialogItemsBuilder.java index 54282d896304..8a357ff7a258 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/AlertDialogItemsBuilder.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/AlertDialogItemsBuilder.java @@ -2,11 +2,11 @@ package org.dolphinemu.dolphinemu.utils; -import androidx.appcompat.app.AlertDialog; - import android.content.Context; import android.content.DialogInterface.OnClickListener; +import androidx.appcompat.app.AlertDialog; + import java.util.ArrayList; public class AlertDialogItemsBuilder diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/ContentHandler.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/ContentHandler.java index e903dc8ef6dd..c90c26696380 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/ContentHandler.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/ContentHandler.java @@ -8,11 +8,10 @@ import android.provider.DocumentsContract; import android.provider.DocumentsContract.Document; +import androidx.annotation.Keep; import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import androidx.annotation.Keep; - import org.dolphinemu.dolphinemu.DolphinApplication; import java.io.FileNotFoundException; diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/NetworkHelper.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/NetworkHelper.java index a9641937034b..380abc949e14 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/NetworkHelper.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/utils/NetworkHelper.java @@ -19,7 +19,6 @@ import java.net.Inet4Address; import java.net.InetAddress; import java.net.UnknownHostException; -import java.util.List; public class NetworkHelper { diff --git a/Source/Android/app/src/main/res/menu/menu_settings.xml b/Source/Android/app/src/main/res/menu/menu_settings.xml index 9988d99f9908..d226103169ef 100644 --- a/Source/Android/app/src/main/res/menu/menu_settings.xml +++ b/Source/Android/app/src/main/res/menu/menu_settings.xml @@ -1,3 +1,2 @@ - + From da888c8b49a8da2a2456f67c2db11f5c7392a013 Mon Sep 17 00:00:00 2001 From: JMC47 Date: Fri, 4 Feb 2022 02:58:36 -0500 Subject: [PATCH 011/237] Remove MMU Default in Disney Trio of Destruction Apparently MMU is no longer needed in these titles. All 3 booted and played into gameplay in the latest builds without issues, and disabling MMU results in a very slight, but noticeable performance boost. 
--- Data/Sys/GameSettings/SCY.ini | 1 - Data/Sys/GameSettings/SQI.ini | 1 - Data/Sys/GameSettings/STS.ini | 1 - 3 files changed, 3 deletions(-) diff --git a/Data/Sys/GameSettings/SCY.ini b/Data/Sys/GameSettings/SCY.ini index fadeb34028bf..0f8964e56de6 100644 --- a/Data/Sys/GameSettings/SCY.ini +++ b/Data/Sys/GameSettings/SCY.ini @@ -2,7 +2,6 @@ [Core] # Values set here will override the main Dolphin settings. -MMU = 1 LowDCBZHack = 1 [OnLoad] diff --git a/Data/Sys/GameSettings/SQI.ini b/Data/Sys/GameSettings/SQI.ini index d43df7ba137f..bb7e3c496148 100644 --- a/Data/Sys/GameSettings/SQI.ini +++ b/Data/Sys/GameSettings/SQI.ini @@ -2,7 +2,6 @@ [Core] # Values set here will override the main Dolphin settings. -MMU = 1 LowDCBZHack = 1 [OnLoad] diff --git a/Data/Sys/GameSettings/STS.ini b/Data/Sys/GameSettings/STS.ini index fce0389aaba1..37214b6d6ad8 100644 --- a/Data/Sys/GameSettings/STS.ini +++ b/Data/Sys/GameSettings/STS.ini @@ -2,7 +2,6 @@ [Core] # Values set here will override the main Dolphin settings. -MMU = 1 [OnLoad] # Add memory patches to be loaded once on boot here. From 56ea1c1d74f349a1ed412b7e604decf0fadc9f6c Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Sat, 5 Feb 2022 19:49:06 +0100 Subject: [PATCH 012/237] Core/State: Guard SaveAs() and LoadAs() with a mutex. --- Source/Core/Core/State.cpp | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/Source/Core/Core/State.cpp b/Source/Core/Core/State.cpp index f89c1fc79e40..e3f382a6101f 100644 --- a/Source/Core/Core/State.cpp +++ b/Source/Core/Core/State.cpp @@ -64,7 +64,7 @@ static AfterLoadCallbackFunc s_on_after_load_callback; // Temporary undo state buffer static std::vector g_undo_load_buffer; static std::vector g_current_buffer; -static bool s_load_or_save_in_progress; +static std::mutex s_load_or_save_in_progress_mutex; static std::mutex g_cs_undo_load_buffer; static std::mutex g_cs_current_buffer; @@ -403,11 +403,10 @@ static void CompressAndDumpState(CompressAndDumpState_args save_args) void SaveAs(const std::string& filename, bool wait) { - if (s_load_or_save_in_progress) + std::unique_lock lk(s_load_or_save_in_progress_mutex, std::try_to_lock); + if (!lk) return; - s_load_or_save_in_progress = true; - Core::RunOnCPUThread( [&] { // Measure the size of the buffer. @@ -446,8 +445,6 @@ void SaveAs(const std::string& filename, bool wait) } }, true); - - s_load_or_save_in_progress = false; } bool ReadHeader(const std::string& filename, StateHeader& header) @@ -550,17 +547,18 @@ static void LoadFileStateData(const std::string& filename, std::vector& ret_ void LoadAs(const std::string& filename) { - if (!Core::IsRunning() || s_load_or_save_in_progress) - { + if (!Core::IsRunning()) return; - } - else if (NetPlay::IsNetPlayRunning()) + + if (NetPlay::IsNetPlayRunning()) { OSD::AddMessage("Loading savestates is disabled in Netplay to prevent desyncs"); return; } - s_load_or_save_in_progress = true; + std::unique_lock lk(s_load_or_save_in_progress_mutex, std::try_to_lock); + if (!lk) + return; Core::RunOnCPUThread( [&] { @@ -617,8 +615,6 @@ void LoadAs(const std::string& filename) s_on_after_load_callback(); }, true); - - s_load_or_save_in_progress = false; } void SetOnAfterLoadCallback(AfterLoadCallbackFunc callback) From 73311694b04cc3830689726ddda879e0a0d8b069 Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Sat, 5 Feb 2022 19:56:44 +0100 Subject: [PATCH 013/237] Core/State: Guard g_save_thread with a mutex. 
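Both State.cpp locking changes here follow the same RAII idea: the unsynchronized s_load_or_save_in_progress flag is replaced by a mutex held through a scoped lock object, so the lock is released on every return path. A minimal standalone sketch of the non-blocking variant used by SaveAs()/LoadAs() above (the function and variable names below are illustrative, not Dolphin's):

#include <mutex>

static std::mutex s_in_progress_mutex;

void DoStateOperation()
{
  // std::try_to_lock: if another save/load is already in progress, give up
  // immediately instead of blocking the caller.
  std::unique_lock lk(s_in_progress_mutex, std::try_to_lock);
  if (!lk)
    return;

  // ... perform the operation. The lock is released automatically when lk
  // goes out of scope, including on any early return.
}
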
--- Source/Core/Core/State.cpp | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/Source/Core/Core/State.cpp b/Source/Core/Core/State.cpp index e3f382a6101f..b1a3c70910a2 100644 --- a/Source/Core/Core/State.cpp +++ b/Source/Core/Core/State.cpp @@ -70,6 +70,7 @@ static std::mutex g_cs_undo_load_buffer; static std::mutex g_cs_current_buffer; static Common::Event g_compressAndDumpStateSyncEvent; +static std::recursive_mutex g_save_thread_mutex; static std::thread g_save_thread; // Don't forget to increase this after doing changes on the savestate system @@ -434,8 +435,12 @@ void SaveAs(const std::string& filename, bool wait) save_args.filename = filename; save_args.wait = wait; - Flush(); - g_save_thread = std::thread(CompressAndDumpState, save_args); + { + std::lock_guard lk(g_save_thread_mutex); + Flush(); + g_save_thread = std::thread(CompressAndDumpState, save_args); + } + g_compressAndDumpStateSyncEvent.Wait(); } else @@ -695,6 +700,8 @@ void SaveFirstSaved() void Flush() { + std::lock_guard lk(g_save_thread_mutex); + // If already saving state, wait for it to finish if (g_save_thread.joinable()) g_save_thread.join(); From 79405de1037e93042b6357e19a43058d4a6b159f Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Mon, 21 Jun 2021 06:15:56 +0200 Subject: [PATCH 014/237] MemArena: Use placeholders on Windows. --- Source/Core/Common/MemArena.h | 19 +- Source/Core/Common/MemArenaWin.cpp | 368 +++++++++++++++++++++++++++-- 2 files changed, 368 insertions(+), 19 deletions(-) diff --git a/Source/Core/Common/MemArena.h b/Source/Core/Common/MemArena.h index 3f3b3adf8969..bdde5579c586 100644 --- a/Source/Core/Common/MemArena.h +++ b/Source/Core/Common/MemArena.h @@ -4,15 +4,16 @@ #pragma once #include - -#ifdef _WIN32 -#include -#endif +#include #include "Common/CommonTypes.h" namespace Common { +#ifdef _WIN32 +struct WindowsMemoryRegion; +#endif + // This class lets you create a block of anonymous RAM, and then arbitrarily map views into it. // Multiple views can mirror the same section of the block, which makes it very convenient for // emulating memory mirrors. 
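To make the view-mapping comment above concrete, here is an illustrative usage sketch of the MemArena interface as it appears in this patch; the sizes, offsets, and the Example() wrapper are hypothetical and not taken from Dolphin's actual memory layout:

#include "Common/MemArena.h"

void Example()
{
  constexpr size_t kArenaSize = 0x2000000;  // hypothetical backing-store size
  Common::MemArena arena;

  // Create the shared backing store and reserve one contiguous address range.
  // With this patch, Windows keeps the reserved range as placeholders so no
  // other allocation can land inside it.
  arena.GrabSHMSegment(kArenaSize);
  u8* base = arena.ReserveMemoryRegion(kArenaSize);

  // Map two views of the same backing offset, e.g. to emulate a memory mirror.
  void* view_a = arena.MapInMemoryRegion(0, 0x100000, base);
  void* view_b = arena.MapInMemoryRegion(0, 0x100000, base + 0x100000);

  arena.UnmapFromMemoryRegion(view_a, 0x100000);
  arena.UnmapFromMemoryRegion(view_b, 0x100000);
  arena.ReleaseMemoryRegion();
  arena.ReleaseSHMSegment();
}
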
@@ -99,7 +100,15 @@ class MemArena final private: #ifdef _WIN32 - HANDLE hMemoryMapping; + WindowsMemoryRegion* EnsureSplitRegionForMapping(void* address, size_t size); + bool JoinRegionsAfterUnmap(void* address, size_t size); + + std::vector m_regions; + void* m_reserved_region = nullptr; + void* m_memory_handle = nullptr; + void* m_api_ms_win_core_memory_l1_1_6_handle = nullptr; + void* m_address_VirtualAlloc2 = nullptr; + void* m_address_MapViewOfFile3 = nullptr; #else int fd; #endif diff --git a/Source/Core/Common/MemArenaWin.cpp b/Source/Core/Common/MemArenaWin.cpp index 3229e417ab19..64346274786c 100644 --- a/Source/Core/Common/MemArenaWin.cpp +++ b/Source/Core/Common/MemArenaWin.cpp @@ -3,70 +3,410 @@ #include "Common/MemArena.h" +#include #include #include -#include #include #include +#include "Common/Assert.h" #include "Common/CommonFuncs.h" #include "Common/CommonTypes.h" +#include "Common/DynamicLibrary.h" #include "Common/Logging/Log.h" #include "Common/MsgHandler.h" #include "Common/StringUtil.h" +using PVirtualAlloc2 = PVOID(WINAPI*)(HANDLE Process, PVOID BaseAddress, SIZE_T Size, + ULONG AllocationType, ULONG PageProtection, + MEM_EXTENDED_PARAMETER* ExtendedParameters, + ULONG ParameterCount); + +using PMapViewOfFile3 = PVOID(WINAPI*)(HANDLE FileMapping, HANDLE Process, PVOID BaseAddress, + ULONG64 Offset, SIZE_T ViewSize, ULONG AllocationType, + ULONG PageProtection, + MEM_EXTENDED_PARAMETER* ExtendedParameters, + ULONG ParameterCount); + +using PIsApiSetImplemented = BOOL(APIENTRY*)(PCSTR Contract); + namespace Common { -MemArena::MemArena() = default; -MemArena::~MemArena() = default; +struct WindowsMemoryRegion +{ + u8* m_start; + size_t m_size; + bool m_is_mapped; + + WindowsMemoryRegion(u8* start, size_t size, bool is_mapped) + : m_start(start), m_size(size), m_is_mapped(is_mapped) + { + } +}; + +MemArena::MemArena() +{ + // Check if VirtualAlloc2 and MapViewOfFile3 are available, which provide functionality to reserve + // a memory region no other allocation may occupy while still allowing us to allocate and map + // stuff within it. If they're not available we'll instead fall back to the 'legacy' logic and + // just hope that nothing allocates in our address range. + DynamicLibrary kernelBase{"KernelBase.dll"}; + if (!kernelBase.IsOpen()) + return; + + void* const ptr_IsApiSetImplemented = kernelBase.GetSymbolAddress("IsApiSetImplemented"); + if (!ptr_IsApiSetImplemented) + return; + if (!static_cast(ptr_IsApiSetImplemented)("api-ms-win-core-memory-l1-1-6")) + return; + + const HMODULE handle = LoadLibraryW(L"api-ms-win-core-memory-l1-1-6.dll"); + if (!handle) + return; + + void* const address_VirtualAlloc2 = GetProcAddress(handle, "VirtualAlloc2FromApp"); + void* const address_MapViewOfFile3 = GetProcAddress(handle, "MapViewOfFile3FromApp"); + if (address_VirtualAlloc2 && address_MapViewOfFile3) + { + m_api_ms_win_core_memory_l1_1_6_handle = handle; + m_address_VirtualAlloc2 = address_VirtualAlloc2; + m_address_MapViewOfFile3 = address_MapViewOfFile3; + } + else + { + // at least one function is not available, use legacy logic + FreeLibrary(handle); + } +} + +MemArena::~MemArena() +{ + ReleaseMemoryRegion(); + ReleaseSHMSegment(); + if (m_api_ms_win_core_memory_l1_1_6_handle) + FreeLibrary(static_cast(m_api_ms_win_core_memory_l1_1_6_handle)); +} void MemArena::GrabSHMSegment(size_t size) { const std::string name = "dolphin-emu." 
+ std::to_string(GetCurrentProcessId()); - hMemoryMapping = CreateFileMapping(INVALID_HANDLE_VALUE, nullptr, PAGE_READWRITE, 0, - static_cast(size), UTF8ToTStr(name).c_str()); + m_memory_handle = CreateFileMapping(INVALID_HANDLE_VALUE, nullptr, PAGE_READWRITE, 0, + static_cast(size), UTF8ToTStr(name).c_str()); } void MemArena::ReleaseSHMSegment() { - CloseHandle(hMemoryMapping); - hMemoryMapping = 0; + if (!m_memory_handle) + return; + CloseHandle(m_memory_handle); + m_memory_handle = nullptr; } void* MemArena::CreateView(s64 offset, size_t size) { - return MapInMemoryRegion(offset, size, nullptr); + return MapViewOfFileEx(m_memory_handle, FILE_MAP_ALL_ACCESS, 0, (DWORD)((u64)offset), size, + nullptr); } void MemArena::ReleaseView(void* view, size_t size) { - UnmapFromMemoryRegion(view, size); + UnmapViewOfFile(view); } u8* MemArena::ReserveMemoryRegion(size_t memory_size) { - u8* base = static_cast(VirtualAlloc(nullptr, memory_size, MEM_RESERVE, PAGE_READWRITE)); - if (!base) + if (m_reserved_region) { - PanicAlertFmt("Failed to map enough memory space: {}", GetLastErrorString()); + PanicAlertFmt("Tried to reserve a second memory region from the same MemArena."); return nullptr; } - VirtualFree(base, 0, MEM_RELEASE); + + u8* base; + if (m_api_ms_win_core_memory_l1_1_6_handle) + { + base = static_cast(static_cast(m_address_VirtualAlloc2)( + nullptr, nullptr, memory_size, MEM_RESERVE | MEM_RESERVE_PLACEHOLDER, PAGE_NOACCESS, + nullptr, 0)); + if (base) + { + m_reserved_region = base; + m_regions.emplace_back(base, memory_size, false); + } + else + { + PanicAlertFmt("Failed to map enough memory space: {}", GetLastErrorString()); + } + } + else + { + NOTICE_LOG_FMT(MEMMAP, "VirtualAlloc2 and/or MapViewFromFile3 unavailable. " + "Falling back to legacy memory mapping."); + base = static_cast(VirtualAlloc(nullptr, memory_size, MEM_RESERVE, PAGE_READWRITE)); + if (base) + VirtualFree(base, 0, MEM_RELEASE); + else + PanicAlertFmt("Failed to find enough memory space: {}", GetLastErrorString()); + } + return base; } void MemArena::ReleaseMemoryRegion() { + if (m_api_ms_win_core_memory_l1_1_6_handle && m_reserved_region) + { + // user should have unmapped everything by this point, check if that's true and yell if not + // (it indicates a bug in the emulated memory mapping logic) + size_t mapped_region_count = 0; + for (const auto& r : m_regions) + { + if (r.m_is_mapped) + ++mapped_region_count; + } + + if (mapped_region_count > 0) + { + PanicAlertFmt("Error while releasing fastmem region: {} regions are still mapped!", + mapped_region_count); + } + + // then free memory + VirtualFree(m_reserved_region, 0, MEM_RELEASE); + m_reserved_region = nullptr; + m_regions.clear(); + } +} + +WindowsMemoryRegion* MemArena::EnsureSplitRegionForMapping(void* start_address, size_t size) +{ + u8* const address = static_cast(start_address); + auto& regions = m_regions; + if (regions.empty()) + { + NOTICE_LOG_FMT(MEMMAP, "Tried to map a memory region without reserving a memory block first."); + return nullptr; + } + + // find closest region that is <= the given address by using upper bound and decrementing + auto it = std::upper_bound( + regions.begin(), regions.end(), address, + [](u8* addr, const WindowsMemoryRegion& region) { return addr < region.m_start; }); + if (it == regions.begin()) + { + // this should never happen, implies that the given address is before the start of the + // reserved memory block + NOTICE_LOG_FMT(MEMMAP, "Invalid address {} given to map.", fmt::ptr(address)); + return nullptr; + } + --it; 
+ + if (it->m_is_mapped) + { + NOTICE_LOG_FMT(MEMMAP, + "Address to map {} with a size of 0x{:x} overlaps with existing mapping " + "at {}.", + fmt::ptr(address), size, fmt::ptr(it->m_start)); + return nullptr; + } + + const size_t mapping_index = it - regions.begin(); + u8* const mapping_address = it->m_start; + const size_t mapping_size = it->m_size; + if (mapping_address == address) + { + // if this region is already split up correctly we don't have to do anything + if (mapping_size == size) + return &*it; + + // if this region is smaller than the requested size we can't map + if (mapping_size < size) + { + NOTICE_LOG_FMT(MEMMAP, + "Not enough free space at address {} to map 0x{:x} bytes (0x{:x} available).", + fmt::ptr(mapping_address), size, mapping_size); + return nullptr; + } + + // split region + if (!VirtualFree(address, size, MEM_RELEASE | MEM_PRESERVE_PLACEHOLDER)) + { + NOTICE_LOG_FMT(MEMMAP, "Region splitting failed: {}", GetLastErrorString()); + return nullptr; + } + + // update tracked mappings and return the first of the two + it->m_size = size; + u8* const new_mapping_start = address + size; + const size_t new_mapping_size = mapping_size - size; + regions.insert(it + 1, WindowsMemoryRegion(new_mapping_start, new_mapping_size, false)); + return ®ions[mapping_index]; + } + + ASSERT(mapping_address < address); + + // is there enough space to map this? + const size_t size_before = static_cast(address - mapping_address); + const size_t minimum_size = size + size_before; + if (mapping_size < minimum_size) + { + NOTICE_LOG_FMT(MEMMAP, + "Not enough free space at address {} to map memory region (need 0x{:x} " + "bytes, but only 0x{:x} available).", + fmt::ptr(address), minimum_size, mapping_size); + return nullptr; + } + + // split region + if (!VirtualFree(address, size, MEM_RELEASE | MEM_PRESERVE_PLACEHOLDER)) + { + NOTICE_LOG_FMT(MEMMAP, "Region splitting failed: {}", GetLastErrorString()); + return nullptr; + } + + // do we now have two regions or three regions? 
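// Editor's note (illustrative, not part of the original commit): for a request that
// starts strictly inside an unmapped placeholder, the split performed below is
//
//   before:  |------------------- placeholder (mapping_size) ------------------|
//   after:   |-- size_before --|---- size ----|-------- remaining tail --------|
//
// When the request ends exactly at the placeholder's end (mapping_size == minimum_size)
// the tail is empty and only two regions remain; otherwise all three are tracked in
// m_regions and the middle one is returned for mapping.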
+ if (mapping_size == minimum_size) + { + // split into two; update tracked mappings and return the second one + it->m_size = size_before; + u8* const new_mapping_start = address; + const size_t new_mapping_size = size; + regions.insert(it + 1, WindowsMemoryRegion(new_mapping_start, new_mapping_size, false)); + return ®ions[mapping_index + 1]; + } + else + { + // split into three; update tracked mappings and return the middle one + it->m_size = size_before; + u8* const middle_mapping_start = address; + const size_t middle_mapping_size = size; + u8* const after_mapping_start = address + size; + const size_t after_mapping_size = mapping_size - minimum_size; + regions.insert(it + 1, WindowsMemoryRegion(after_mapping_start, after_mapping_size, false)); + regions.insert(regions.begin() + mapping_index + 1, + WindowsMemoryRegion(middle_mapping_start, middle_mapping_size, false)); + return ®ions[mapping_index + 1]; + } } void* MemArena::MapInMemoryRegion(s64 offset, size_t size, void* base) { - return MapViewOfFileEx(hMemoryMapping, FILE_MAP_ALL_ACCESS, 0, (DWORD)((u64)offset), size, base); + if (m_api_ms_win_core_memory_l1_1_6_handle) + { + WindowsMemoryRegion* const region = EnsureSplitRegionForMapping(base, size); + if (!region) + { + PanicAlertFmt("Splitting memory region failed."); + return nullptr; + } + + void* rv = static_cast(m_address_MapViewOfFile3)( + m_memory_handle, nullptr, base, offset, size, MEM_REPLACE_PLACEHOLDER, PAGE_READWRITE, + nullptr, 0); + if (rv) + { + region->m_is_mapped = true; + } + else + { + PanicAlertFmt("Mapping memory region failed: {}", GetLastErrorString()); + + // revert the split, if any + JoinRegionsAfterUnmap(base, size); + } + return rv; + } + + return MapViewOfFileEx(m_memory_handle, FILE_MAP_ALL_ACCESS, 0, (DWORD)((u64)offset), size, base); +} + +bool MemArena::JoinRegionsAfterUnmap(void* start_address, size_t size) +{ + u8* const address = static_cast(start_address); + auto& regions = m_regions; + if (regions.empty()) + { + NOTICE_LOG_FMT(MEMMAP, + "Tried to unmap a memory region without reserving a memory block first."); + return false; + } + + // there should be a mapping that matches the request exactly, find it + auto it = std::lower_bound( + regions.begin(), regions.end(), address, + [](const WindowsMemoryRegion& region, u8* addr) { return region.m_start < addr; }); + if (it == regions.end() || it->m_start != address || it->m_size != size) + { + // didn't find it, we were given bogus input + NOTICE_LOG_FMT(MEMMAP, "Invalid address/size given to unmap."); + return false; + } + it->m_is_mapped = false; + + const bool can_join_with_preceding = it != regions.begin() && !(it - 1)->m_is_mapped; + const bool can_join_with_succeeding = (it + 1) != regions.end() && !(it + 1)->m_is_mapped; + if (can_join_with_preceding && can_join_with_succeeding) + { + // join three mappings to one + auto it_preceding = it - 1; + auto it_succeeding = it + 1; + const size_t total_size = it_preceding->m_size + size + it_succeeding->m_size; + if (!VirtualFree(it_preceding->m_start, total_size, MEM_RELEASE | MEM_COALESCE_PLACEHOLDERS)) + { + NOTICE_LOG_FMT(MEMMAP, "Region coalescing failed: {}", GetLastErrorString()); + return false; + } + + it_preceding->m_size = total_size; + regions.erase(it, it + 2); + } + else if (can_join_with_preceding && !can_join_with_succeeding) + { + // join two mappings to one + auto it_preceding = it - 1; + const size_t total_size = it_preceding->m_size + size; + if (!VirtualFree(it_preceding->m_start, total_size, MEM_RELEASE | 
MEM_COALESCE_PLACEHOLDERS)) + { + NOTICE_LOG_FMT(MEMMAP, "Region coalescing failed: {}", GetLastErrorString()); + return false; + } + + it_preceding->m_size = total_size; + regions.erase(it); + } + else if (!can_join_with_preceding && can_join_with_succeeding) + { + // join two mappings to one + auto it_succeeding = it + 1; + const size_t total_size = size + it_succeeding->m_size; + if (!VirtualFree(it->m_start, total_size, MEM_RELEASE | MEM_COALESCE_PLACEHOLDERS)) + { + NOTICE_LOG_FMT(MEMMAP, "Region coalescing failed: {}", GetLastErrorString()); + return false; + } + + it->m_size = total_size; + regions.erase(it_succeeding); + } + return true; } void MemArena::UnmapFromMemoryRegion(void* view, size_t size) { + if (m_api_ms_win_core_memory_l1_1_6_handle) + { + if (UnmapViewOfFileEx(view, MEM_PRESERVE_PLACEHOLDER)) + { + if (!JoinRegionsAfterUnmap(view, size)) + PanicAlertFmt("Joining memory region failed."); + } + else + { + PanicAlertFmt("Unmapping memory region failed: {}", GetLastErrorString()); + } + return; + } + UnmapViewOfFile(view); } } // namespace Common From ca31964833a41b2968b933144eeec86242dbd67a Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Sun, 6 Feb 2022 17:53:45 +0100 Subject: [PATCH 015/237] Externals: Update minizip to minizip-ng version 3.0.4. --- Externals/minizip/CMakeLists.txt | 69 +- Externals/minizip/CMakeLists.txt.original | 1015 ++++++++ Externals/minizip/LICENSE | 2 +- Externals/minizip/README | 5 +- Externals/minizip/ioapi.c | 351 --- Externals/minizip/ioapi.h | 146 -- Externals/minizip/minigzip.c | 182 ++ Externals/minizip/minizip.c | 671 +++++ Externals/minizip/minizip.pc.cmakein | 14 + Externals/minizip/minizip.vcxproj | 30 +- Externals/minizip/mz.h | 274 ++ Externals/minizip/mz_compat.c | 1303 ++++++++++ Externals/minizip/mz_compat.h | 396 +++ Externals/minizip/mz_crypt.c | 196 ++ Externals/minizip/mz_crypt.h | 65 + Externals/minizip/mz_crypt_apple.c | 487 ++++ Externals/minizip/mz_crypt_openssl.c | 638 +++++ Externals/minizip/mz_crypt_win32.c | 739 ++++++ Externals/minizip/mz_os.c | 354 +++ Externals/minizip/mz_os.h | 175 ++ Externals/minizip/mz_os_posix.c | 367 +++ Externals/minizip/mz_os_win32.c | 659 +++++ Externals/minizip/mz_strm.c | 560 +++++ Externals/minizip/mz_strm.h | 132 + Externals/minizip/mz_strm_buf.c | 385 +++ Externals/minizip/mz_strm_buf.h | 42 + Externals/minizip/mz_strm_bzip.c | 374 +++ Externals/minizip/mz_strm_bzip.h | 45 + Externals/minizip/mz_strm_libcomp.c | 356 +++ Externals/minizip/mz_strm_libcomp.h | 45 + Externals/minizip/mz_strm_lzma.c | 470 ++++ Externals/minizip/mz_strm_lzma.h | 43 + Externals/minizip/mz_strm_mem.c | 272 ++ Externals/minizip/mz_strm_mem.h | 48 + Externals/minizip/mz_strm_os.h | 40 + Externals/minizip/mz_strm_os_posix.c | 206 ++ Externals/minizip/mz_strm_os_win32.c | 296 +++ Externals/minizip/mz_strm_pkcrypt.c | 338 +++ Externals/minizip/mz_strm_pkcrypt.h | 46 + Externals/minizip/mz_strm_split.c | 438 ++++ Externals/minizip/mz_strm_split.h | 43 + Externals/minizip/mz_strm_wzaes.c | 362 +++ Externals/minizip/mz_strm_wzaes.h | 46 + Externals/minizip/mz_strm_zlib.c | 393 +++ Externals/minizip/mz_strm_zlib.h | 43 + Externals/minizip/mz_strm_zstd.c | 351 +++ Externals/minizip/mz_strm_zstd.h | 44 + Externals/minizip/mz_zip.c | 2771 +++++++++++++++++++++ Externals/minizip/mz_zip.h | 259 ++ Externals/minizip/mz_zip_rw.c | 1943 +++++++++++++++ Externals/minizip/mz_zip_rw.h | 285 +++ Externals/minizip/unzip.c | 1991 --------------- Externals/minizip/unzip.h | 303 +-- Externals/minizip/zip.h | 13 + 54 files 
changed, 18319 insertions(+), 2802 deletions(-) create mode 100644 Externals/minizip/CMakeLists.txt.original delete mode 100644 Externals/minizip/ioapi.c delete mode 100644 Externals/minizip/ioapi.h create mode 100644 Externals/minizip/minigzip.c create mode 100644 Externals/minizip/minizip.c create mode 100644 Externals/minizip/minizip.pc.cmakein create mode 100644 Externals/minizip/mz.h create mode 100644 Externals/minizip/mz_compat.c create mode 100644 Externals/minizip/mz_compat.h create mode 100644 Externals/minizip/mz_crypt.c create mode 100644 Externals/minizip/mz_crypt.h create mode 100644 Externals/minizip/mz_crypt_apple.c create mode 100644 Externals/minizip/mz_crypt_openssl.c create mode 100644 Externals/minizip/mz_crypt_win32.c create mode 100644 Externals/minizip/mz_os.c create mode 100644 Externals/minizip/mz_os.h create mode 100644 Externals/minizip/mz_os_posix.c create mode 100644 Externals/minizip/mz_os_win32.c create mode 100644 Externals/minizip/mz_strm.c create mode 100644 Externals/minizip/mz_strm.h create mode 100644 Externals/minizip/mz_strm_buf.c create mode 100644 Externals/minizip/mz_strm_buf.h create mode 100644 Externals/minizip/mz_strm_bzip.c create mode 100644 Externals/minizip/mz_strm_bzip.h create mode 100644 Externals/minizip/mz_strm_libcomp.c create mode 100644 Externals/minizip/mz_strm_libcomp.h create mode 100644 Externals/minizip/mz_strm_lzma.c create mode 100644 Externals/minizip/mz_strm_lzma.h create mode 100644 Externals/minizip/mz_strm_mem.c create mode 100644 Externals/minizip/mz_strm_mem.h create mode 100644 Externals/minizip/mz_strm_os.h create mode 100644 Externals/minizip/mz_strm_os_posix.c create mode 100644 Externals/minizip/mz_strm_os_win32.c create mode 100644 Externals/minizip/mz_strm_pkcrypt.c create mode 100644 Externals/minizip/mz_strm_pkcrypt.h create mode 100644 Externals/minizip/mz_strm_split.c create mode 100644 Externals/minizip/mz_strm_split.h create mode 100644 Externals/minizip/mz_strm_wzaes.c create mode 100644 Externals/minizip/mz_strm_wzaes.h create mode 100644 Externals/minizip/mz_strm_zlib.c create mode 100644 Externals/minizip/mz_strm_zlib.h create mode 100644 Externals/minizip/mz_strm_zstd.c create mode 100644 Externals/minizip/mz_strm_zstd.h create mode 100644 Externals/minizip/mz_zip.c create mode 100644 Externals/minizip/mz_zip.h create mode 100644 Externals/minizip/mz_zip_rw.c create mode 100644 Externals/minizip/mz_zip_rw.h delete mode 100644 Externals/minizip/unzip.c create mode 100644 Externals/minizip/zip.h diff --git a/Externals/minizip/CMakeLists.txt b/Externals/minizip/CMakeLists.txt index e9111242966f..b28e136681c9 100644 --- a/Externals/minizip/CMakeLists.txt +++ b/Externals/minizip/CMakeLists.txt @@ -1,18 +1,69 @@ project(minizip C) -set(SRCS ioapi.c unzip.c) - -add_definitions(-DNOUNCRYPT) -if(UNIX) - add_definitions(-D__USE_FILE_OFFSET64) - add_definitions(-D__USE_LARGEFILE64) - add_definitions(-D_LARGEFILE64_SOURCE) - add_definitions(-D_FILE_OFFSET_BIT=64) +add_library(minizip STATIC + mz.h + mz_compat.c + mz_compat.h + mz_crypt.c + mz_crypt.h + mz_os.c + mz_os.h + mz_strm.c + mz_strm.h + mz_strm_buf.c + mz_strm_buf.h + mz_strm_mem.c + mz_strm_mem.h + mz_strm_os.h + mz_strm_split.c + mz_strm_split.h + mz_strm_zlib.c + mz_strm_zlib.h + mz_zip.c + mz_zip.h + mz_zip_rw.c + mz_zip_rw.h + unzip.h + zip.h +) + +if (UNIX) + target_sources(minizip PRIVATE + mz_os_posix.c + mz_strm_os_posix.c + ) +endif() + +if (WIN32) + target_sources(minizip PRIVATE + mz_os_win32.c + mz_strm_os_win32.c + ) endif() 
-add_library(minizip STATIC ${SRCS}) target_include_directories(minizip PUBLIC .) +target_compile_definitions(minizip PRIVATE HAVE_ZLIB ZLIB_COMPAT MZ_ZIP_NO_CRYPTO MZ_ZIP_NO_ENCRYPTION) +if (UNIX) + target_compile_definitions(minizip PRIVATE _POSIX_C_SOURCE=200112L) + target_compile_definitions(minizip PRIVATE __USE_LARGEFILE64 _LARGEFILE64_SOURCE) +endif() + +check_include_file(stdint.h HAVE_STDINT_H) +if (HAVE_STDINT_H) + target_compile_definitions(minizip PRIVATE HAVE_STDINT_H) +endif() + +check_include_file(inttypes.h HAVE_INTTYPES_H) +if (HAVE_INTTYPES_H) + target_compile_definitions(minizip PRIVATE HAVE_INTTYPES_H) +endif() + +check_function_exists(fseeko HAVE_FSEEKO) +if (NOT HAVE_FSEEKO) + target_compile_definitions(minizip PRIVATE NO_FSEEKO) +endif() + target_link_libraries(minizip PUBLIC ZLIB::ZLIB) add_library(MiniZip::minizip ALIAS minizip) diff --git a/Externals/minizip/CMakeLists.txt.original b/Externals/minizip/CMakeLists.txt.original new file mode 100644 index 000000000000..38bbf82bbb5c --- /dev/null +++ b/Externals/minizip/CMakeLists.txt.original @@ -0,0 +1,1015 @@ +#*************************************************************************** +# Copyright (C) 2017-2020 Nathan Moinvaziri +# https://github.com/zlib-ng/minizip-ng +# Copyright (C) 2016 Matthias Schmieder +# schmieder.matthias@gmail.com +#*************************************************************************** + +cmake_minimum_required(VERSION 3.13) + +message(STATUS "Using CMake version ${CMAKE_VERSION}") + +# Compatibility options +option(MZ_COMPAT "Enables compatibility layer" ON) +# Compression library options +option(MZ_ZLIB "Enables ZLIB compression" ON) +option(MZ_BZIP2 "Enables BZIP2 compression" ON) +option(MZ_LZMA "Enables LZMA & XZ compression" ON) +option(MZ_ZSTD "Enables ZSTD compression" ON) +option(MZ_LIBCOMP "Enables Apple compression" ${APPLE}) +option(MZ_FETCH_LIBS "Enables fetching third-party libraries if not found" ${WIN32}) +option(MZ_FORCE_FETCH_LIBS "Enables fetching third-party libraries always" OFF) +# Encryption support options +option(MZ_PKCRYPT "Enables PKWARE traditional encryption" ON) +option(MZ_WZAES "Enables WinZIP AES encryption" ON) +option(MZ_OPENSSL "Enables OpenSSL for encryption" ${UNIX}) +option(MZ_LIBBSD "Enable libbsd crypto random" ${UNIX}) +option(MZ_SIGNING "Enables zip signing support" ON) +# Character conversion options +option(MZ_ICONV "Enables iconv for string encoding conversion" ON) +# Code generation options +option(MZ_COMPRESS_ONLY "Only support compression" OFF) +option(MZ_DECOMPRESS_ONLY "Only support decompression" OFF) +option(MZ_FILE32_API "Builds using posix 32-bit file api" OFF) +# Build and continuous integration options +option(MZ_BUILD_TESTS "Builds minizip test executable" OFF) +option(MZ_BUILD_UNIT_TESTS "Builds minizip unit test project" OFF) +option(MZ_BUILD_FUZZ_TESTS "Builds minizip fuzzer executables" OFF) +option(MZ_CODE_COVERAGE "Builds with code coverage flags" OFF) +# Package management options +set(MZ_PROJECT_SUFFIX "" CACHE STRING "Project name suffix for package managers") + +mark_as_advanced(MZ_FILE32_API MZ_PROJECT_SUFFIX) + +# Backwards compatibility +if(DEFINED MZ_BUILD_TEST) + set(MZ_BUILD_TESTS ${MZ_BUILD_TEST}) +endif() +if(DEFINED MZ_BUILD_UNIT_TEST) + set(MZ_BUILD_UNIT_TESTS ${MZ_BUILD_UNIT_TEST}) +endif() +if(DEFINED MZ_BUILD_FUZZ_TEST) + set(MZ_BUILD_FUZZ_TESTS ${MZ_BUILD_FUZZ_TEST}) +endif() + +if(POLICY CMP0074) + cmake_policy(SET CMP0074 OLD) +endif() +if(POLICY CMP0054) + cmake_policy(SET CMP0054 NEW) +endif() + +# 
ZLIB_ROOT - Parent directory of zlib installation +# BZIP2_ROOT - Parent directory of BZip2 installation +# OPENSSL_ROOT - Parent directory of OpenSSL installation + +enable_language(C) + +# Library version +set(VERSION "3.0.4") + +# API version +set(SOVERSION "3") + +include(CheckLibraryExists) +include(CheckSymbolExists) +include(CheckFunctionExists) +include(CheckIncludeFile) +include(CheckTypeSize) +include(GNUInstallDirs) +include(FeatureSummary) + +set(INSTALL_BIN_DIR ${CMAKE_INSTALL_BINDIR} CACHE PATH "Installation directory for executables") +set(INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR} CACHE PATH "Installation directory for libraries") +set(INSTALL_INC_DIR ${CMAKE_INSTALL_INCLUDEDIR} CACHE PATH "Installation directory for headers") +set(INSTALL_MAN_DIR ${CMAKE_INSTALL_MANDIR} CACHE PATH "Installation directory for manual pages") + +set(STDLIB_DEF) +set(MINIZIP_DEF) +set(MINIZIP_INC) +set(MINIZIP_LIB) +set(MINIZIP_LBD) +set(MINIZIP_DEP) +set(MINIZIP_DEP_PKG) +set(MINIZIP_LFG) + +# Initial source files +set(MINIZIP_SRC + mz_crypt.c + mz_os.c + mz_strm.c + mz_strm_buf.c + mz_strm_mem.c + mz_strm_split.c + mz_zip.c + mz_zip_rw.c) + +# Initial header files +set(MINIZIP_HDR + mz.h + mz_os.h + mz_crypt.h + mz_strm.h + mz_strm_buf.h + mz_strm_mem.h + mz_strm_split.h + mz_strm_os.h + mz_zip.h + mz_zip_rw.h) + +set(PC_PRIVATE_LIBS) + +# Check for system includes +check_include_file(stdint.h HAVE_STDINT_H) +check_include_file(inttypes.h HAVE_INTTYPES_H) + +if(HAVE_STDINT_H) + list(APPEND STDLIB_DEF -DHAVE_STDINT_H) +endif() +if(HAVE_INTTYPES_H) + list(APPEND STDLIB_DEF -DHAVE_INTTYPES_H) +endif() + +# Check for large file support +check_type_size(off64_t OFF64_T) +if(HAVE_OFF64_T) + list(APPEND STDLIB_DEF -D__USE_LARGEFILE64) + list(APPEND STDLIB_DEF -D_LARGEFILE64_SOURCE) +endif() +# Check for fseeko support +check_function_exists(fseeko HAVE_FSEEKO) +if(NOT HAVE_FSEEKO) + list(APPEND STDLIB_DEF -DNO_FSEEKO) +endif() + +# Checkout remote repository +macro(clone_repo name url) + if(NOT ${name}_REPOSITORY) + set(${name}_REPOSITORY ${url}) + endif() + if(NOT ${name}_TAG) + set(${name}_TAG master) + endif() + + message(STATUS "Fetching ${name} ${${name}_REPOSITORY} ${${name}_TAG}") + + # Check for FetchContent cmake support + if(${CMAKE_VERSION} VERSION_LESS "3.11") + message(FATAL_ERROR "CMake 3.11 required to fetch ${name}") + else() + include(FetchContent) + + string(TOLOWER ${name} name_lower) + string(TOUPPER ${name} name_upper) + + FetchContent_Declare(${name} + GIT_REPOSITORY ${${name}_REPOSITORY} + GIT_TAG ${${name}_TAG} + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/lib/${name_lower}) + + FetchContent_GetProperties(${name} POPULATED ${name_lower}_POPULATED) + + if(NOT ${name_lower}_POPULATED) + FetchContent_Populate(${name}) + endif() + + set(${name_upper}_SOURCE_DIR ${${name_lower}_SOURCE_DIR}) + set(${name_upper}_BINARY_DIR ${${name_lower}_BINARY_DIR}) + endif() +endmacro() + +if(MZ_LIBCOMP) + if(APPLE) + # Use Apple libcompression + list(APPEND MINIZIP_DEF -DHAVE_LIBCOMP) + list(APPEND MINIZIP_SRC mz_strm_libcomp.c) + list(APPEND MINIZIP_HDR mz_strm_libcomp.h) + list(APPEND MINIZIP_LIB compression) + + # Disable zlib as libcompression is preferred + set(MZ_ZLIB OFF) + else() + message(STATUS "LibCompression not supported on the current platform") + + set(MZ_LIBCOMP OFF) + endif() +endif() + +if(MZ_ZLIB) + # Check if zlib is present + if(NOT MZ_FORCE_FETCH_LIBS) + find_package(ZLIB QUIET) + set(ZLIB_VERSION ${ZLIB_VERSION_STRING}) + endif() + + if(ZLIB_FOUND AND NOT MZ_FORCE_FETCH_LIBS) + 
message(STATUS "Using ZLIB ${ZLIB_VERSION}") + + list(APPEND MINIZIP_INC ${ZLIB_INCLUDE_DIRS}) + list(APPEND MINIZIP_LIB ${ZLIB_LIBRARIES}) + list(APPEND MINIZIP_LBD ${ZLIB_LIBRARY_DIRS}) + + set(PC_PRIVATE_LIBS " -lz") + elseif(MZ_FETCH_LIBS) + clone_repo(zlib https://github.com/madler/zlib) + + # Don't automatically add all targets to the solution + add_subdirectory(${ZLIB_SOURCE_DIR} ${ZLIB_BINARY_DIR} EXCLUDE_FROM_ALL) + + list(APPEND MINIZIP_INC ${ZLIB_SOURCE_DIR}) + list(APPEND MINIZIP_INC ${ZLIB_BINARY_DIR}) + + # Have to add zlib to install targets + if(NOT DEFINED BUILD_SHARED_LIBS OR NOT ${BUILD_SHARED_LIBS}) + list(APPEND MINIZIP_DEP zlibstatic) + else() + list(APPEND MINIZIP_DEP zlib) + endif() + else() + message(STATUS "ZLIB library not found") + + set(MZ_ZLIB OFF) + endif() + + if(MZ_ZLIB) + list(APPEND MINIZIP_DEP_PKG ZLIB) + list(APPEND MINIZIP_DEF -DHAVE_ZLIB) + if(ZLIB_COMPAT) + list(APPEND MINIZIP_DEF -DZLIB_COMPAT) + endif() + list(APPEND MINIZIP_SRC mz_strm_zlib.c) + list(APPEND MINIZIP_HDR mz_strm_zlib.h) + endif() +endif() + +if(MZ_BZIP2) + # Check if bzip2 is present + if(NOT MZ_FORCE_FETCH_LIBS) + find_package(BZip2 QUIET) + endif() + + if(BZIP2_FOUND AND NOT MZ_FORCE_FETCH_LIBS) + message(STATUS "Using BZIP2 ${BZIP2_VERSION_STRING}") + + list(APPEND MINIZIP_INC ${BZIP2_INCLUDE_DIRS}) + list(APPEND MINIZIP_LIB ${BZIP2_LIBRARIES}) + list(APPEND MINIZIP_LBD ${BZIP2_LIBRARY_DIRS}) + + set(PC_PRIVATE_LIBS "${PC_PRIVATE_LIBS} -lbzip2") + elseif(MZ_FETCH_LIBS) + clone_repo(bzip2 https://sourceware.org/git/bzip2.git) + + # BZip2 repository does not support cmake so we have to create + # the bzip2 library ourselves + set(BZIP2_SRC + ${BZIP2_SOURCE_DIR}/blocksort.c + ${BZIP2_SOURCE_DIR}/bzlib.c + ${BZIP2_SOURCE_DIR}/compress.c + ${BZIP2_SOURCE_DIR}/crctable.c + ${BZIP2_SOURCE_DIR}/decompress.c + ${BZIP2_SOURCE_DIR}/huffman.c + ${BZIP2_SOURCE_DIR}/randtable.c) + + set(BZIP2_HDR + ${BZIP2_SOURCE_DIR}/bzlib.h + ${BZIP2_SOURCE_DIR}/bzlib_private.h) + + add_library(bzip2 STATIC ${BZIP2_SRC} ${BZIP2_HDR}) + + target_compile_definitions(bzip2 PRIVATE -DBZ_NO_STDIO) + + list(APPEND MINIZIP_DEP bzip2) + list(APPEND MINIZIP_INC ${BZIP2_SOURCE_DIR}) + else() + message(STATUS "BZip2 library not found") + + set(MZ_BZIP2 OFF) + endif() + + if(MZ_BZIP2) + list(APPEND MINIZIP_DEP_PKG BZip2) + list(APPEND MINIZIP_DEF -DHAVE_BZIP2) + list(APPEND MINIZIP_SRC mz_strm_bzip.c) + list(APPEND MINIZIP_HDR mz_strm_bzip.h) + endif() +endif() + +if(MZ_LZMA) + # Check if liblzma is present + if(NOT MZ_FORCE_FETCH_LIBS) + find_package(PkgConfig QUIET) + if(PKGCONFIG_FOUND) + pkg_check_modules(LIBLZMA liblzma) + endif() + if(NOT LIBLZMA_FOUND) + find_package(LibLZMA QUIET) + set(LIBLZMA_VERSION ${LIBLZMA_VERSION_STRING}) + endif() + endif() + + if(LIBLZMA_FOUND AND NOT MZ_FORCE_FETCH_LIBS) + message(STATUS "Using LZMA ${LIBLZMA_VERSION}") + + list(APPEND MINIZIP_INC ${LIBLZMA_INCLUDE_DIRS}) + list(APPEND MINIZIP_LIB ${LIBLZMA_LIBRARIES}) + + set(PC_PRIVATE_LIBS "${PC_PRIVATE_LIBS} -lliblzma") + elseif(MZ_FETCH_LIBS) + clone_repo(liblzma https://git.tukaani.org/xz.git) + + # Don't automatically add all targets to the solution + add_subdirectory(${LIBLZMA_SOURCE_DIR} ${LIBLZMA_BINARY_DIR} EXCLUDE_FROM_ALL) + + list(APPEND MINIZIP_INC ${LIBLZMA_SOURCE_DIR}/src/liblzma/api) + list(APPEND MINIZIP_DEP liblzma) + list(APPEND MINIZIP_LIB ${LIBLZMA_TARGET}) + else() + message(STATUS "LibLZMA library not found") + + set(MZ_LZMA OFF) + endif() + + if(MZ_LZMA) + list(APPEND MINIZIP_DEP_PKG LibLZMA) + list(APPEND 
MINIZIP_DEF -DHAVE_LZMA -DLZMA_API_STATIC) + list(APPEND MINIZIP_SRC mz_strm_lzma.c) + list(APPEND MINIZIP_HDR mz_strm_lzma.h) + endif() +endif() + +if(MZ_ZSTD) + # Check if zstd is present + if(NOT MZ_FORCE_FETCH_LIBS) + find_package(PkgConfig QUIET) + if(PKGCONFIG_FOUND) + pkg_check_modules(ZSTD libzstd) + endif() + if(NOT ZSTD_FOUND) + find_package(ZSTD QUIET) + if(ZSTD_FOUND) + if(TARGET zstd::libzstd_static) + list(APPEND ZSTD_LIBRARIES zstd::libzstd_static) + else() + list(APPEND ZSTD_LIBRARIES zstd::libzstd_shared) + endif() + endif() + endif() + endif() + + if(ZSTD_FOUND AND NOT MZ_FORCE_FETCH_LIBS) + message(STATUS "Using ZSTD ${ZSTD_VERSION}") + + list(APPEND MINIZIP_INC ${ZSTD_INCLUDE_DIRS}) + list(APPEND MINIZIP_LIB ${ZSTD_LIBRARIES}) + list(APPEND MINIZIP_LBD ${ZSTD_LIBRARY_DIRS}) + + set(PC_PRIVATE_LIBS "${PC_PRIVATE_LIBS} -lzstd") + elseif(MZ_FETCH_LIBS) + clone_repo(zstd https://github.com/facebook/zstd) + + # Don't automatically add all targets to the solution + add_subdirectory(${ZSTD_SOURCE_DIR}/build/cmake ${ZSTD_BINARY_DIR} EXCLUDE_FROM_ALL) + + list(APPEND MINIZIP_INC ${ZSTD_SOURCE_DIR}/lib) + if(NOT DEFINED BUILD_SHARED_LIBS OR NOT ${BUILD_SHARED_LIBS}) + list(APPEND MINIZIP_DEP libzstd_static) + else() + list(APPEND MINIZIP_DEP libzstd_shared) + endif() + else() + message(STATUS "ZSTD library not found") + + set(MZ_ZSTD OFF) + endif() + + if(MZ_ZSTD) + list(APPEND MINIZIP_DEP_PKG zstd) + list(APPEND MINIZIP_DEF -DHAVE_ZSTD) + list(APPEND MINIZIP_SRC mz_strm_zstd.c) + list(APPEND MINIZIP_HDR mz_strm_zstd.h) + endif() +endif() + +if(NOT MZ_LIBCOMP AND NOT MZ_ZLIB AND NOT MZ_ZSTD AND NOT MZ_BZIP2 AND NOT MZ_LZMA) + message(STATUS "Compression not supported due to missing libraries") + + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_DECOMPRESSION) + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_COMPRESSION) +endif() + +if(MZ_OPENSSL) + # Check to see if openssl installation is present + find_package(PkgConfig) + if(PKGCONFIG_FOUND) + pkg_check_modules(OPENSSL openssl) + endif() + if(NOT OPENSSL_FOUND) + find_package(OpenSSL) + endif() + + if(OPENSSL_FOUND) + message(STATUS "Using OpenSSL ${OPENSSL_VERSION}") + + list(APPEND MINIZIP_SRC mz_crypt_openssl.c) + list(APPEND MINIZIP_LIB ${OPENSSL_LIBRARIES}) + list(APPEND MINIZIP_LBD ${OPENSSL_LIBRARY_DIRS}) + list(APPEND MINIZIP_INC ${OPENSSL_INCLUDE_DIR}) + + if(OPENSSL_INCLUDE_DIRS) + list(APPEND MINIZIP_INC ${OPENSSL_INCLUDE_DIRS}) + endif() + else() + message(STATUS "OpenSSL library not found") + + set(MZ_OPENSSL OFF) + endif() +endif() + +# Windows specific +if(WIN32) + list(APPEND MINIZIP_DEF -D_CRT_SECURE_NO_DEPRECATE) + list(APPEND MINIZIP_SRC mz_os_win32.c mz_strm_os_win32.c) + + if(MZ_PKCRYPT OR MZ_WZAES OR MZ_SIGNING) + if(MZ_OPENSSL) + list(APPEND MINIZIP_DEP_PKG OpenSSL) + else() + message(STATUS "Using CryptoAPI") + + list(APPEND MINIZIP_SRC mz_crypt_win32.c) + list(APPEND MINIZIP_LIB crypt32.lib) + endif() + else() + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_CRYPTO) + endif() +endif() +if(CMAKE_SYSTEM_NAME STREQUAL "WindowsStore") + list(APPEND MINIZIP_DEF -DMZ_WINRT_API) +endif() + +# Unix specific +if(UNIX) + list(APPEND STDLIB_DEF -D_POSIX_C_SOURCE=200112L) + list(APPEND MINIZIP_SRC mz_os_posix.c mz_strm_os_posix.c) + + if(MZ_PKCRYPT OR MZ_WZAES OR MZ_SIGNING) + if(MZ_OPENSSL) + list(APPEND MINIZIP_DEP_PKG OpenSSL) + else() + if(APPLE) + message(STATUS "Using CoreFoundation Framework") + find_library(COREFOUNDATION_LIBRARY CoreFoundation) + + list(APPEND MINIZIP_LIB ${COREFOUNDATION_LIBRARY}) + + message(STATUS "Using Security 
Framework") + find_library(SECURITY_LIBRARY Security) + + list(APPEND MINIZIP_LIB ${SECURITY_LIBRARY}) + list(APPEND MINIZIP_LFG "-Wl,-F/Library/Frameworks") + + check_include_file(CommonCrypto/CommonCrypto.h COMMONCRYPTO_FOUND) + if(COMMONCRYPTO_FOUND) + message(STATUS "Using CommonCrypto") + + list(APPEND MINIZIP_SRC mz_crypt_apple.c) + + set(MZ_LIBBSD OFF) + else() + message(STATUS "CommonCrypto library not found") + + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_CRYPTO) + + if(MZ_WZAES) + message(STATUS "WinZIP AES support requires CommonCrypto or OpenSSL") + + set(MZ_WZAES OFF) + endif() + if(MZ_SIGNING) + message(STATUS "Signing support requires CommonCrypto or OpenSSL") + + set(MZ_SIGNING OFF) + endif() + endif() + else() + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_CRYPTO) + + if(MZ_WZAES) + message(STATUS "WinZIP AES support requires OpenSSL") + + set(MZ_WZAES OFF) + endif() + if(MZ_SIGNING) + message(STATUS "Signing support requires OpenSSL") + + set(MZ_SIGNING OFF) + endif() + endif() + + if(MZ_PKCRYPT AND NOT MZ_WZAES) + # Check to see which random generation functions we have + check_symbol_exists("getrandom" "sys/random.h" HAVE_GETRANDOM) + if(HAVE_GETRANDOM) + list(APPEND MINIZIP_DEF -DHAVE_GETRANDOM) + endif() + check_symbol_exists("arc4random_buf" "stdlib.h" HAVE_ARC4RANDOM_BUF) + if(HAVE_ARC4RANDOM_BUF) + list(APPEND MINIZIP_DEF -DHAVE_ARC4RANDOM_BUF) + else() + check_symbol_exists("arc4random" "stdlib.h" HAVE_ARC4RANDOM) + if(HAVE_ARC4RANDOM) + list(APPEND MINIZIP_DEF -DHAVE_ARC4RANDOM) + endif() + endif() + + if(APPLE) + # Requires _DARWIN_C_SOURCE for arcrandom functions + list(APPEND MINIZIP_DEF -D_DARWIN_C_SOURCE) + endif() + + if(MZ_LIBBSD AND NOT HAVE_ARC4RANDOM_BUF) + find_package(PkgConfig REQUIRED) + + pkg_check_modules(LIBBSD libbsd) + if(LIBBSD_FOUND) + check_library_exists("${LIBBSD_LIBRARIES}" "arc4random_buf" + "${LIBBSD_LIBRARY_DIRS}" HAVE_LIBBSD_ARC4RANDOM_BUF) + + if(HAVE_LIBBSD_ARC4RANDOM_BUF) + list(APPEND MINIZIP_DEF -DHAVE_LIBBSD -DHAVE_ARC4RANDOM_BUF) + list(APPEND MINIZIP_INC ${LIBBSD_INCLUDE_DIRS}) + list(APPEND MINIZIP_LIB ${LIBBSD_LIBRARIES}) + list(APPEND MINIZIP_LBD ${LIBBSD_LIBRARY_DIRS}) + + link_directories(${LIBBSD_LIBRARY_DIRS}) + endif() + else() + set(MZ_LIBBSD OFF) + endif() + else() + set(MZ_LIBBSD OFF) + endif() + + if(NOT MZ_LIBBSD AND NOT HAVE_GETRANDOM AND NOT HAVE_ARC4RANDOM_BUF AND NOT HAVE_ARC4RANDOM) + message(WARNING "Low quality entropy function used for encryption") + endif() + endif() + endif() + else() + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_CRYPTO) + endif() + + # Iconv is only necessary when it is not already built-in + # FindIconv requires cmake 3.11 or higher + if (MZ_ICONV) + find_package(Iconv QUIET) + endif() + + if(Iconv_FOUND) + message(STATUS "Using Iconv") + + list(APPEND MINIZIP_DEF -DHAVE_ICONV) + list(APPEND MINIZIP_INC ${Iconv_INCLUDE_DIRS}) + list(APPEND MINIZIP_DEP_PKG Iconv) + if(NOT Iconv_IS_BUILT_IN) + list(APPEND MINIZIP_LIB ${Iconv_LIBRARIES}) + list(APPEND MINIZIP_LBD ${Iconv_LIBRARY_DIRS}) + endif() + + set(PC_PRIVATE_LIBS "${PC_PRIVATE_LIBS} -liconv") + else() + message(STATUS "Character encoding support requires iconv") + + set(MZ_ICONV OFF) + endif() +else() + set(MZ_LIBBSD OFF) + set(MZ_ICONV OFF) +endif() + +# Setup predefined macros +if(MZ_COMPRESS_ONLY) + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_DECOMPRESSION) +endif() +if(MZ_DECOMPRESS_ONLY) + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_COMPRESSION) +endif() +if(NOT MZ_PKCRYPT AND NOT MZ_WZAES) + list(APPEND MINIZIP_DEF -DMZ_ZIP_NO_ENCRYPTION) +endif() +if(MZ_SIGNING) + 
list(APPEND MINIZIP_DEF -DMZ_ZIP_SIGNING)
+endif()
+if(MZ_FILE32_API)
+    list(APPEND MINIZIP_DEF -DMZ_FILE32_API)
+endif()
+
+# Include traditional PKWare encryption
+if(MZ_PKCRYPT)
+    list(APPEND MINIZIP_DEF -DHAVE_PKCRYPT)
+    list(APPEND MINIZIP_SRC mz_strm_pkcrypt.c)
+    list(APPEND MINIZIP_HDR mz_strm_pkcrypt.h)
+endif()
+
+# Include WinZIP AES encryption
+if(MZ_WZAES)
+    list(APPEND MINIZIP_DEF -DHAVE_WZAES)
+    list(APPEND MINIZIP_SRC mz_strm_wzaes.c)
+    list(APPEND MINIZIP_HDR mz_strm_wzaes.h)
+endif()
+
+# Include compatibility layer
+if(MZ_COMPAT)
+    set(COMPAT_HEADER "\
+/* file.h -- Compatibility layer shim\n\
+   part of the minizip-ng project\n\n\
+   This program is distributed under the terms of the same license as zlib.\n\
+   See the accompanying LICENSE file for the full text of the license.\n\
+*/\n\n\
+#ifndef MZ_COMPAT_FILE\n\
+#define MZ_COMPAT_FILE\n\n\
+#include \"mz_compat.h\"\n\n\
+#endif\n")
+
+    string(REPLACE "file.h" "zip.h" ZIP_COMPAT_HEADER ${COMPAT_HEADER})
+    string(REPLACE "MZ_COMPAT_FILE" "MZ_COMPAT_ZIP" ZIP_COMPAT_HEADER ${ZIP_COMPAT_HEADER})
+    file(WRITE "zip.h" ${ZIP_COMPAT_HEADER})
+
+    string(REPLACE "file.h" "unzip.h" UNZIP_COMPAT_HEADER ${COMPAT_HEADER})
+    string(REPLACE "MZ_COMPAT_FILE" "MZ_COMPAT_UNZIP" UNZIP_COMPAT_HEADER ${UNZIP_COMPAT_HEADER})
+    file(WRITE "unzip.h" ${UNZIP_COMPAT_HEADER})
+
+    if(MZ_COMPAT_VERSION)
+        list(APPEND MINIZIP_DEF -DMZ_COMPAT_VERSION=${MZ_COMPAT_VERSION})
+    endif()
+    list(APPEND MINIZIP_SRC mz_compat.c)
+    list(APPEND MINIZIP_HDR mz_compat.h zip.h unzip.h)
+endif()
+
+# Set compiler options
+if(MZ_CODE_COVERAGE)
+    if(NOT MSVC)
+        message(STATUS "Code coverage enabled")
+        add_compile_options(-O0 -g -fprofile-arcs -ftest-coverage)
+        if(CMAKE_C_COMPILER_ID MATCHES "(Apple)?[Cc]lang")
+            set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage")
+        elseif(CMAKE_C_COMPILER_ID MATCHES "GNU")
+            link_libraries(gcov)
+        endif()
+        set_property(DIRECTORY PROPERTY
+            GCC_INSTRUMENT_PROGRAM_FLOW_ARCS YES
+            GCC_GENERATE_TEST_COVERAGE_FILES YES)
+    else()
+        set(MZ_CODE_COVERAGE OFF)
+    endif()
+else()
+    if(MSVC)
+        add_compile_options(
+            $<$<CONFIG:Debug>:/Zi>
+            $<$<CONFIG:Debug>:/Od>
+            $<$<CONFIG:Debug>:/W3>
+            $<$<CONFIG:Release>:/Ox>
+            $<$<CONFIG:Release>:/Os>)
+    else()
+        add_compile_options(
+            $<$<CONFIG:Debug>:-g>
+            $<$<CONFIG:Debug>:-Wall>
+            $<$<CONFIG:Release>:-Os>)
+    endif()
+endif()
+
+list(APPEND MINIZIP_INC ${CMAKE_CURRENT_SOURCE_DIR})
+
+# Create minizip library
+project(minizip${MZ_PROJECT_SUFFIX} VERSION ${VERSION})
+
+if(NOT ${MZ_PROJECT_SUFFIX} STREQUAL "")
+    message(STATUS "Project configured as ${PROJECT_NAME}")
+endif()
+
+set(MINIZIP_PC ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc)
+configure_file(minizip.pc.cmakein ${MINIZIP_PC} @ONLY)
+
+set(INSTALL_CMAKE_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
+    CACHE PATH "Installation directory for cmake files.")
+set(INSTALL_PKGCONFIG_DIR "${CMAKE_INSTALL_LIBDIR}/pkgconfig"
+    CACHE PATH "Installation directory for pkgconfig (.pc) files")
+
+add_library(${PROJECT_NAME} ${MINIZIP_SRC} ${MINIZIP_HDR})
+
+set_target_properties(${PROJECT_NAME} PROPERTIES
+    VERSION ${VERSION}
+    SOVERSION ${SOVERSION}
+    LINKER_LANGUAGE C
+    DEFINE_SYMBOL "MZ_EXPORTS")
+
+if(MINIZIP_LFG)
+    set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS ${MINIZIP_LFG})
+endif()
+if(MSVC)
+    # VS debugger has problems when executable and static library are named the same
+    set_target_properties(${PROJECT_NAME} PROPERTIES OUTPUT_NAME lib${PROJECT_NAME})
+endif()
+if(NOT RISCOS)
+    set_target_properties(${PROJECT_NAME} PROPERTIES POSITION_INDEPENDENT_CODE 1)
+endif()
+if(MZ_LZMA)
+    set_target_properties(${PROJECT_NAME} PROPERTIES
C_STANDARD 99) +endif() + +target_link_libraries(${PROJECT_NAME} PUBLIC ${MINIZIP_LIB} ${MINIZIP_DEP}) +target_link_directories(${PROJECT_NAME} PUBLIC ${MINIZIP_LBD}) +target_compile_definitions(${PROJECT_NAME} PRIVATE ${STDLIB_DEF} ${MINIZIP_DEF}) +target_include_directories(${PROJECT_NAME} PRIVATE ${MINIZIP_INC}) +target_include_directories(${PROJECT_NAME} PUBLIC + $ + $) + +# Create minizip alias +add_library(MINIZIP::${PROJECT_NAME} ALIAS ${PROJECT_NAME}) + +# Install files +if(NOT SKIP_INSTALL_LIBRARIES AND NOT SKIP_INSTALL_ALL) + install(TARGETS ${PROJECT_NAME} ${MINIZIP_DEP} + EXPORT ${PROJECT_NAME} + INCLUDES DESTINATION "${INSTALL_INC_DIR}" + RUNTIME DESTINATION "${INSTALL_BIN_DIR}" + ARCHIVE DESTINATION "${INSTALL_LIB_DIR}" + LIBRARY DESTINATION "${INSTALL_LIB_DIR}") + install(EXPORT ${PROJECT_NAME} + DESTINATION "${INSTALL_CMAKE_DIR}" + NAMESPACE "MINIZIP::") + + # Create and install CMake package config version file to allow find_package() + include(CMakePackageConfigHelpers) + write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake + COMPATIBILITY SameMajorVersion) + set(MINIZIP_CONFIG_CONTENT "@PACKAGE_INIT@\n") + if(MINIZIP_DEP_PKG) + string(APPEND MINIZIP_CONFIG_CONTENT "include(CMakeFindDependencyMacro)\n") + foreach(PKG_NAME ${MINIZIP_DEP_PKG}) + string(APPEND MINIZIP_CONFIG_CONTENT "find_dependency(${PKG_NAME} REQUIRED)\n") + endforeach() + endif() + string(APPEND MINIZIP_CONFIG_CONTENT "include(\"\${CMAKE_CURRENT_LIST_DIR}/${PROJECT_NAME}.cmake\")") + file(WRITE minizip-config.cmake.in ${MINIZIP_CONFIG_CONTENT}) + + # Create config for find_package() + configure_package_config_file(minizip-config.cmake.in ${PROJECT_NAME}-config.cmake + INSTALL_DESTINATION "${INSTALL_CMAKE_DIR}") + + file(REMOVE minizip-config.cmake.in) + + install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config.cmake + DESTINATION "${INSTALL_CMAKE_DIR}") +endif() +if(NOT SKIP_INSTALL_HDR AND NOT SKIP_INSTALL_ALL) + install(FILES ${MINIZIP_HDR} DESTINATION "${INSTALL_INC_DIR}") +endif() +if(NOT SKIP_INSTALL_FILES AND NOT SKIP_INSTALL_ALL) + install(FILES ${MINIZIP_PC} DESTINATION "${INSTALL_PKGCONFIG_DIR}") +endif() + +# Build test executables +if(MZ_BUILD_TESTS) + if(MZ_ZLIB AND NOT MZ_LIBCOMP) + add_executable(minigzip_cmd minigzip.c) + set_target_properties(minigzip_cmd PROPERTIES OUTPUT_NAME minigzip) + target_compile_definitions(minigzip_cmd PRIVATE ${STDLIB_DEF} ${MINIZIP_DEF}) + target_include_directories(minigzip_cmd PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}) + target_link_libraries(minigzip_cmd ${PROJECT_NAME}) + + if(NOT SKIP_INSTALL_BINARIES AND NOT SKIP_INSTALL_ALL) + install(TARGETS minigzip_cmd RUNTIME DESTINATION "bin") + endif() + endif() + + add_executable(minizip_cmd minizip.c) + set_target_properties(minizip_cmd PROPERTIES OUTPUT_NAME ${PROJECT_NAME}) + target_compile_definitions(minizip_cmd PRIVATE ${STDLIB_DEF} ${MINIZIP_DEF}) + target_include_directories(minizip_cmd PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}) + target_link_libraries(minizip_cmd ${PROJECT_NAME}) + + if(NOT SKIP_INSTALL_BINARIES AND NOT SKIP_INSTALL_ALL) + install(TARGETS minizip_cmd RUNTIME DESTINATION "bin") + endif() + + add_executable(test_cmd test/test.c test/test.h) + target_compile_definitions(test_cmd PRIVATE ${STDLIB_DEF} ${MINIZIP_DEF}) + if(MZ_COMPAT) + target_compile_definitions(test_cmd PRIVATE -DHAVE_COMPAT) + endif() + target_include_directories(test_cmd PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}) + 
target_link_libraries(test_cmd ${PROJECT_NAME}) +endif() + +if(MZ_BUILD_TESTS AND MZ_BUILD_UNIT_TESTS) + enable_testing() + + # Can't disable zlib testing so ctest tries to run zlib example app + if(MZ_ZLIB AND NOT MZ_LIBCOMP AND NOT ZLIB_FOUND) + add_dependencies(${PROJECT_NAME} example) + if(HAVE_OFF64_T) + add_dependencies(${PROJECT_NAME} example64) + endif() + endif() + + add_test(NAME test_cmd COMMAND test_cmd WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) + + function(create_compress_tests EXTRA_NAME EXTRA_ARGS) + if(MZ_DECOMPRESS_ONLY) + return() + endif() + list(FIND EXTRA_ARGS "-z" ZIPCD_IDX) + if(${ZIPCD_IDX} EQUAL -1) + set(COMPRESS_METHOD_NAMES "raw") + set(COMPRESS_METHOD_ARGS "-0") + endif() + if(MZ_ZLIB OR MZ_LIBCOMP) + list(APPEND COMPRESS_METHOD_NAMES "deflate") + list(APPEND COMPRESS_METHOD_ARGS "-9") + endif() + if(MZ_BZIP2) + list(APPEND COMPRESS_METHOD_NAMES "bzip2") + list(APPEND COMPRESS_METHOD_ARGS "-b") + endif() + if(MZ_LZMA) + list(APPEND COMPRESS_METHOD_NAMES "lzma") + list(APPEND COMPRESS_METHOD_ARGS "-m") + endif() + if(MZ_LZMA OR MZ_LIBCOMP) + list(APPEND COMPRESS_METHOD_NAMES "xz") + list(APPEND COMPRESS_METHOD_ARGS "-n") + endif() + if(MZ_ZSTD) + list(APPEND COMPRESS_METHOD_NAMES "zstd") + list(APPEND COMPRESS_METHOD_ARGS "-t") + endif() + list(LENGTH COMPRESS_METHOD_NAMES COMPRESS_METHOD_COUNT) + math(EXPR COMPRESS_METHOD_COUNT "${COMPRESS_METHOD_COUNT}-1") + foreach(INDEX RANGE ${COMPRESS_METHOD_COUNT}) + list(GET COMPRESS_METHOD_NAMES ${INDEX} COMPRESS_METHOD_NAME) + list(GET COMPRESS_METHOD_ARGS ${INDEX} COMPRESS_METHOD_ARG) + add_test(NAME ${COMPRESS_METHOD_NAME}-zip-${EXTRA_NAME} + COMMAND minizip_cmd ${COMPRESS_METHOD_ARG} -o ${EXTRA_ARGS} + result.zip test.c test.h empty.txt random.bin uniform.bin fuzz + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + add_test(NAME ${COMPRESS_METHOD_NAME}-list-${EXTRA_NAME} + COMMAND minizip_cmd -l ${EXTRA_ARGS} result.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + if(NOT MZ_COMPRESS_ONLY) + add_test(NAME ${COMPRESS_METHOD_NAME}-unzip-${EXTRA_NAME} + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out result.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + add_test(NAME ${COMPRESS_METHOD_NAME}-append-${EXTRA_NAME} + COMMAND minizip_cmd ${COMPRESS_METHOD_ARG} -a ${EXTRA_ARGS} + result.zip single.txt + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + if(NOT MZ_COMPRESS_ONLY) + add_test(NAME ${COMPRESS_METHOD_NAME}-append-unzip-${EXTRA_NAME} + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out result.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + add_test(NAME ${COMPRESS_METHOD_NAME}-erase-${EXTRA_NAME} + COMMAND minizip_cmd -o -e result.zip test.c test.h + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + if(NOT MZ_COMPRESS_ONLY) + add_test(NAME ${COMPRESS_METHOD_NAME}-erase-unzip-${EXTRA_NAME} + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out result.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + endforeach() + endfunction() + + # Perform tests against ourself + create_compress_tests("generic" "") + create_compress_tests("span" "-k;1024") + create_compress_tests("zipcd" "-z") + if(MZ_PKCRYPT) + create_compress_tests("pkcrypt" "-p;test123") + endif() + if(MZ_WZAES) + create_compress_tests("wzaes" "-s;-p;test123") + endif() + if(MZ_SIGNING) + create_compress_tests("signed" "-h;test.p12;-w;test") + create_compress_tests("secure" "-z;-h;test.p12;-w;test") + endif() + + # Perform tests on others + if(NOT MZ_COMPRESS_ONLY) + if(MZ_ZLIB OR 
MZ_LIBCOMP) + add_test(NAME unzip-tiny + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out + fuzz/unzip_fuzzer_seed_corpus/tiny.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + if(MZ_BZIP2) + add_test(NAME unzip-bzip2 + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out + fuzz/unzip_fuzzer_seed_corpus/bzip2.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + if(MZ_LZMA) + add_test(NAME unzip-lzma + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out + fuzz/unzip_fuzzer_seed_corpus/lzma.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + if(MZ_PKCRYPT) + add_test(NAME unzip-pkcrypt + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out -p test123 + fuzz/unzip_fuzzer_seed_corpus/encrypted_pkcrypt.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + if(MZ_WZAES) + add_test(NAME unzip-wzaes + COMMAND minizip_cmd -x -o ${EXTRA_ARGS} -d out -p test123 + fuzz/unzip_fuzzer_seed_corpus/encrypted_wzaes.zip + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + endif() + if(NOT MZ_COMPRESS_ONLY AND NOT MZ_DECOMPRESS_ONLY) + if(MZ_ZLIB AND NOT MZ_LIBCOMP) + add_test(NAME gz + COMMAND minigzip_cmd random.bin + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + add_test(NAME ungz + COMMAND minigzip_cmd -x -d out random.bin.gz + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/test) + endif() + endif() +endif() + +#Build fuzzer executables +if(MZ_BUILD_FUZZ_TESTS) + if(CMAKE_C_COMPILER_ID MATCHES "Clang") + enable_language(CXX) + + if(DEFINED ENV{LIB_FUZZING_ENGINE}) + set(FUZZING_ENGINE $ENV{LIB_FUZZING_ENGINE}) + set(FUZZING_ENGINE_FOUND TRUE) + else() + find_library(FUZZING_ENGINE "FuzzingEngine") + endif() + endif() + + if(NOT FUZZING_ENGINE_FOUND) + set(FUZZER_SRC "test/fuzz/standalone.c") + else() + set(FUZZER_SRC) + endif() + + macro(configure_fuzz_test target) + add_executable(${target} "test/fuzz/${target}.c" ${FUZZER_SRC}) + set_target_properties(${target} PROPERTIES LINKER_LANGUAGE CXX) + target_compile_definitions(${target} PRIVATE ${STDLIB_DEF}) + target_include_directories(${target} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}) + target_link_libraries(${target} ${PROJECT_NAME}) + + add_dependencies(${target} ${PROJECT_NAME}) + if(FUZZING_ENGINE_FOUND) + target_link_libraries(${target} ${FUZZING_ENGINE}) + endif() + endmacro() + + configure_fuzz_test(zip_fuzzer) + configure_fuzz_test(unzip_fuzzer) + + if(NOT SKIP_INSTALL_BINARIES AND NOT SKIP_INSTALL_ALL) + install(TARGETS zip_fuzzer RUNTIME DESTINATION "bin") + install(TARGETS unzip_fuzzer RUNTIME DESTINATION "bin") + endif() +endif() + +# Compatibility options +add_feature_info(MZ_COMPAT MZ_COMPAT "Enables compatibility layer") +# Compression library options +add_feature_info(MZ_ZLIB MZ_ZLIB "Enables ZLIB compression") +add_feature_info(MZ_BZIP2 MZ_BZIP2 "Enables BZIP2 compression") +add_feature_info(MZ_LZMA MZ_LZMA "Enables LZMA & XZ compression") +add_feature_info(MZ_ZSTD MZ_ZSTD "Enables ZSTD compression") +add_feature_info(MZ_LIBCOMP MZ_LIBCOMP "Enables Apple compression") +add_feature_info(MZ_FETCH_LIBS MZ_FETCH_LIBS "Enables fetching third-party libraries if not found") +add_feature_info(MZ_FORCE_FETCH_LIBS MZ_FORCE_FETCH_LIBS "Enables fetching third-party libraries always") +# Encryption support options +add_feature_info(MZ_PKCRYPT MZ_PKCRYPT "Enables PKWARE traditional encryption") +add_feature_info(MZ_WZAES MZ_WZAES "Enables WinZIP AES encryption") +add_feature_info(MZ_OPENSSL MZ_OPENSSL "Enables OpenSSL for encryption") +add_feature_info(MZ_LIBBSD MZ_LIBBSD "Build with 
libbsd for crypto random") +add_feature_info(MZ_SIGNING MZ_SIGNING "Enables zip signing support") +# Character conversion options +add_feature_info(MZ_ICONV MZ_ICONV "Enables iconv string encoding conversion library") +# Code generation options +add_feature_info(MZ_COMPRESS_ONLY MZ_COMPRESS_ONLY "Only support compression") +add_feature_info(MZ_DECOMPRESS_ONLY MZ_DECOMPRESS_ONLY "Only support decompression") +add_feature_info(MZ_FILE32_API MZ_FILE32_API "Builds using posix 32-bit file api") +# Build and continuous integration options +add_feature_info(MZ_BUILD_TESTS MZ_BUILD_TESTS "Builds minizip test executable") +add_feature_info(MZ_BUILD_UNIT_TESTS MZ_BUILD_UNIT_TESTS "Builds minizip unit test project") +add_feature_info(MZ_BUILD_FUZZ_TESTS MZ_BUILD_FUZZ_TESTS "Builds minizip fuzzer executables") +add_feature_info(MZ_CODE_COVERAGE MZ_CODE_COVERAGE "Builds with code coverage flags") + +feature_summary(WHAT ENABLED_FEATURES DISABLED_FEATURES INCLUDE_QUIET_PACKAGES) diff --git a/Externals/minizip/LICENSE b/Externals/minizip/LICENSE index 086295a2b2af..3b6c4e142ebb 100644 --- a/Externals/minizip/LICENSE +++ b/Externals/minizip/LICENSE @@ -14,4 +14,4 @@ freely, subject to the following restrictions: appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. -3. This notice may not be removed or altered from any source distribution. \ No newline at end of file +3. This notice may not be removed or altered from any source distribution. diff --git a/Externals/minizip/README b/Externals/minizip/README index 8d311e116dd5..4b929d1504f3 100644 --- a/Externals/minizip/README +++ b/Externals/minizip/README @@ -1,2 +1,3 @@ -Partial import from https://github.com/nmoinvaz/minizip -Revision: b3713abe7722f1b779e7cdbd527c6208abbe1121 +minizip-ng, Version 3.0.4 (2021-11-29) +https://github.com/zlib-ng/minizip-ng +Revision: 95987e98b4862c055b8cf91d6e7ce5f9153ddc24 diff --git a/Externals/minizip/ioapi.c b/Externals/minizip/ioapi.c deleted file mode 100644 index bba26334dc9e..000000000000 --- a/Externals/minizip/ioapi.c +++ /dev/null @@ -1,351 +0,0 @@ -/* ioapi.c -- IO base function header for compress/uncompress .zip - part of the MiniZip project - - Copyright (C) 1998-2010 Gilles Vollant - http://www.winimage.com/zLibDll/minizip.html - Modifications for Zip64 support - Copyright (C) 2009-2010 Mathias Svensson - http://result42.com - - This program is distributed under the terms of the same license as zlib. - See the accompanying LICENSE file for the full text of the license. 
-*/ - -#include -#include - -#if defined unix || defined __APPLE__ -#include -#include -#endif - -#include "ioapi.h" - -#if defined(_WIN32) -# define snprintf _snprintf -#endif - -voidpf call_zopen64(const zlib_filefunc64_32_def *pfilefunc, const void *filename, int mode) -{ - if (pfilefunc->zfile_func64.zopen64_file != NULL) - return (*(pfilefunc->zfile_func64.zopen64_file)) (pfilefunc->zfile_func64.opaque, filename, mode); - return (*(pfilefunc->zopen32_file))(pfilefunc->zfile_func64.opaque, (const char*)filename, mode); -} - -voidpf call_zopendisk64(const zlib_filefunc64_32_def *pfilefunc, voidpf filestream, uint32_t number_disk, int mode) -{ - if (pfilefunc->zfile_func64.zopendisk64_file != NULL) - return (*(pfilefunc->zfile_func64.zopendisk64_file)) (pfilefunc->zfile_func64.opaque, filestream, number_disk, mode); - return (*(pfilefunc->zopendisk32_file))(pfilefunc->zfile_func64.opaque, filestream, number_disk, mode); -} - -long call_zseek64(const zlib_filefunc64_32_def *pfilefunc, voidpf filestream, uint64_t offset, int origin) -{ - uint32_t offset_truncated = 0; - if (pfilefunc->zfile_func64.zseek64_file != NULL) - return (*(pfilefunc->zfile_func64.zseek64_file)) (pfilefunc->zfile_func64.opaque,filestream,offset,origin); - offset_truncated = (uint32_t)offset; - if (offset_truncated != offset) - return -1; - return (*(pfilefunc->zseek32_file))(pfilefunc->zfile_func64.opaque,filestream, offset_truncated, origin); -} - -uint64_t call_ztell64(const zlib_filefunc64_32_def *pfilefunc, voidpf filestream) -{ - uint64_t position; - if (pfilefunc->zfile_func64.zseek64_file != NULL) - return (*(pfilefunc->zfile_func64.ztell64_file)) (pfilefunc->zfile_func64.opaque, filestream); - position = (*(pfilefunc->ztell32_file))(pfilefunc->zfile_func64.opaque, filestream); - if ((position) == UINT32_MAX) - return (uint64_t)-1; - return position; -} - -void fill_zlib_filefunc64_32_def_from_filefunc32(zlib_filefunc64_32_def *p_filefunc64_32, const zlib_filefunc_def *p_filefunc32) -{ - p_filefunc64_32->zfile_func64.zopen64_file = NULL; - p_filefunc64_32->zfile_func64.zopendisk64_file = NULL; - p_filefunc64_32->zopen32_file = p_filefunc32->zopen_file; - p_filefunc64_32->zopendisk32_file = p_filefunc32->zopendisk_file; - p_filefunc64_32->zfile_func64.zerror_file = p_filefunc32->zerror_file; - p_filefunc64_32->zfile_func64.zread_file = p_filefunc32->zread_file; - p_filefunc64_32->zfile_func64.zwrite_file = p_filefunc32->zwrite_file; - p_filefunc64_32->zfile_func64.ztell64_file = NULL; - p_filefunc64_32->zfile_func64.zseek64_file = NULL; - p_filefunc64_32->zfile_func64.zclose_file = p_filefunc32->zclose_file; - p_filefunc64_32->zfile_func64.zerror_file = p_filefunc32->zerror_file; - p_filefunc64_32->zfile_func64.opaque = p_filefunc32->opaque; - p_filefunc64_32->zseek32_file = p_filefunc32->zseek_file; - p_filefunc64_32->ztell32_file = p_filefunc32->ztell_file; -} - -static voidpf ZCALLBACK fopen_file_func(voidpf opaque, const char *filename, int mode); -static uint32_t ZCALLBACK fread_file_func(voidpf opaque, voidpf stream, void* buf, uint32_t size); -static uint32_t ZCALLBACK fwrite_file_func(voidpf opaque, voidpf stream, const void *buf, uint32_t size); -static uint64_t ZCALLBACK ftell64_file_func(voidpf opaque, voidpf stream); -static long ZCALLBACK fseek64_file_func(voidpf opaque, voidpf stream, uint64_t offset, int origin); -static int ZCALLBACK fclose_file_func(voidpf opaque, voidpf stream); -static int ZCALLBACK ferror_file_func(voidpf opaque, voidpf stream); - -typedef struct -{ - FILE *file; - int 
filenameLength; - void *filename; -} FILE_IOPOSIX; - -static voidpf file_build_ioposix(FILE *file, const char *filename) -{ - FILE_IOPOSIX *ioposix = NULL; - if (file == NULL) - return NULL; - ioposix = (FILE_IOPOSIX*)malloc(sizeof(FILE_IOPOSIX)); - ioposix->file = file; - ioposix->filenameLength = (int)strlen(filename) + 1; - ioposix->filename = (char*)malloc(ioposix->filenameLength * sizeof(char)); - strncpy((char*)ioposix->filename, filename, ioposix->filenameLength); - return (voidpf)ioposix; -} - -static voidpf ZCALLBACK fopen_file_func(voidpf opaque, const char *filename, int mode) -{ - FILE* file = NULL; - const char *mode_fopen = NULL; - if ((mode & ZLIB_FILEFUNC_MODE_READWRITEFILTER) == ZLIB_FILEFUNC_MODE_READ) - mode_fopen = "rb"; - else if (mode & ZLIB_FILEFUNC_MODE_EXISTING) - mode_fopen = "r+b"; - else if (mode & ZLIB_FILEFUNC_MODE_CREATE) - mode_fopen = "wb"; - - if ((filename != NULL) && (mode_fopen != NULL)) - { - file = fopen(filename, mode_fopen); - return file_build_ioposix(file, filename); - } - return file; -} - -static voidpf ZCALLBACK fopen64_file_func(voidpf opaque, const void *filename, int mode) -{ - FILE* file = NULL; - const char *mode_fopen = NULL; - if ((mode & ZLIB_FILEFUNC_MODE_READWRITEFILTER) == ZLIB_FILEFUNC_MODE_READ) - mode_fopen = "rb"; - else if (mode & ZLIB_FILEFUNC_MODE_EXISTING) - mode_fopen = "r+b"; - else if (mode & ZLIB_FILEFUNC_MODE_CREATE) - mode_fopen = "wb"; - - if ((filename != NULL) && (mode_fopen != NULL)) - { - file = fopen64((const char*)filename, mode_fopen); - return file_build_ioposix(file, (const char*)filename); - } - return file; -} - -static voidpf ZCALLBACK fopendisk64_file_func(voidpf opaque, voidpf stream, uint32_t number_disk, int mode) -{ - FILE_IOPOSIX *ioposix = NULL; - char *diskFilename = NULL; - voidpf ret = NULL; - int i = 0; - - if (stream == NULL) - return NULL; - ioposix = (FILE_IOPOSIX*)stream; - diskFilename = (char*)malloc(ioposix->filenameLength * sizeof(char)); - strncpy(diskFilename, (const char*)ioposix->filename, ioposix->filenameLength); - for (i = ioposix->filenameLength - 1; i >= 0; i -= 1) - { - if (diskFilename[i] != '.') - continue; - snprintf(&diskFilename[i], ioposix->filenameLength - i, ".z%02u", number_disk + 1); - break; - } - if (i >= 0) - ret = fopen64_file_func(opaque, diskFilename, mode); - free(diskFilename); - return ret; -} - -static voidpf ZCALLBACK fopendisk_file_func(voidpf opaque, voidpf stream, uint32_t number_disk, int mode) -{ - FILE_IOPOSIX *ioposix = NULL; - char *diskFilename = NULL; - voidpf ret = NULL; - int i = 0; - - if (stream == NULL) - return NULL; - ioposix = (FILE_IOPOSIX*)stream; - diskFilename = (char*)malloc(ioposix->filenameLength * sizeof(char)); - strncpy(diskFilename, (const char*)ioposix->filename, ioposix->filenameLength); - for (i = ioposix->filenameLength - 1; i >= 0; i -= 1) - { - if (diskFilename[i] != '.') - continue; - snprintf(&diskFilename[i], ioposix->filenameLength - i, ".z%02u", number_disk + 1); - break; - } - if (i >= 0) - ret = fopen_file_func(opaque, diskFilename, mode); - free(diskFilename); - return ret; -} - -static uint32_t ZCALLBACK fread_file_func(voidpf opaque, voidpf stream, void* buf, uint32_t size) -{ - FILE_IOPOSIX *ioposix = NULL; - uint32_t read = (uint32_t)-1; - if (stream == NULL) - return read; - ioposix = (FILE_IOPOSIX*)stream; - read = (uint32_t)fread(buf, 1, (size_t)size, ioposix->file); - return read; -} - -static uint32_t ZCALLBACK fwrite_file_func(voidpf opaque, voidpf stream, const void *buf, uint32_t size) -{ - FILE_IOPOSIX 
*ioposix = NULL; - uint32_t written = (uint32_t)-1; - if (stream == NULL) - return written; - ioposix = (FILE_IOPOSIX*)stream; - written = (uint32_t)fwrite(buf, 1, (size_t)size, ioposix->file); - return written; -} - -static long ZCALLBACK ftell_file_func(voidpf opaque, voidpf stream) -{ - FILE_IOPOSIX *ioposix = NULL; - long ret = -1; - if (stream == NULL) - return ret; - ioposix = (FILE_IOPOSIX*)stream; - ret = ftell(ioposix->file); - return ret; -} - -static uint64_t ZCALLBACK ftell64_file_func(voidpf opaque, voidpf stream) -{ - FILE_IOPOSIX *ioposix = NULL; - uint64_t ret = (uint64_t)-1; - if (stream == NULL) - return ret; - ioposix = (FILE_IOPOSIX*)stream; - ret = ftello64(ioposix->file); - return ret; -} - -static long ZCALLBACK fseek_file_func(voidpf opaque, voidpf stream, uint32_t offset, int origin) -{ - FILE_IOPOSIX *ioposix = NULL; - int fseek_origin = 0; - long ret = 0; - - if (stream == NULL) - return -1; - ioposix = (FILE_IOPOSIX*)stream; - - switch (origin) - { - case ZLIB_FILEFUNC_SEEK_CUR: - fseek_origin = SEEK_CUR; - break; - case ZLIB_FILEFUNC_SEEK_END: - fseek_origin = SEEK_END; - break; - case ZLIB_FILEFUNC_SEEK_SET: - fseek_origin = SEEK_SET; - break; - default: - return -1; - } - if (fseek(ioposix->file, offset, fseek_origin) != 0) - ret = -1; - return ret; -} - -static long ZCALLBACK fseek64_file_func(voidpf opaque, voidpf stream, uint64_t offset, int origin) -{ - FILE_IOPOSIX *ioposix = NULL; - int fseek_origin = 0; - long ret = 0; - - if (stream == NULL) - return -1; - ioposix = (FILE_IOPOSIX*)stream; - - switch (origin) - { - case ZLIB_FILEFUNC_SEEK_CUR: - fseek_origin = SEEK_CUR; - break; - case ZLIB_FILEFUNC_SEEK_END: - fseek_origin = SEEK_END; - break; - case ZLIB_FILEFUNC_SEEK_SET: - fseek_origin = SEEK_SET; - break; - default: - return -1; - } - - if (fseeko64(ioposix->file, offset, fseek_origin) != 0) - ret = -1; - - return ret; -} - -static int ZCALLBACK fclose_file_func(voidpf opaque, voidpf stream) -{ - FILE_IOPOSIX *ioposix = NULL; - int ret = -1; - if (stream == NULL) - return ret; - ioposix = (FILE_IOPOSIX*)stream; - if (ioposix->filename != NULL) - free(ioposix->filename); - ret = fclose(ioposix->file); - free(ioposix); - return ret; -} - -static int ZCALLBACK ferror_file_func(voidpf opaque, voidpf stream) -{ - FILE_IOPOSIX *ioposix = NULL; - int ret = -1; - if (stream == NULL) - return ret; - ioposix = (FILE_IOPOSIX*)stream; - ret = ferror(ioposix->file); - return ret; -} - -void fill_fopen_filefunc(zlib_filefunc_def *pzlib_filefunc_def) -{ - pzlib_filefunc_def->zopen_file = fopen_file_func; - pzlib_filefunc_def->zopendisk_file = fopendisk_file_func; - pzlib_filefunc_def->zread_file = fread_file_func; - pzlib_filefunc_def->zwrite_file = fwrite_file_func; - pzlib_filefunc_def->ztell_file = ftell_file_func; - pzlib_filefunc_def->zseek_file = fseek_file_func; - pzlib_filefunc_def->zclose_file = fclose_file_func; - pzlib_filefunc_def->zerror_file = ferror_file_func; - pzlib_filefunc_def->opaque = NULL; -} - -void fill_fopen64_filefunc(zlib_filefunc64_def *pzlib_filefunc_def) -{ - pzlib_filefunc_def->zopen64_file = fopen64_file_func; - pzlib_filefunc_def->zopendisk64_file = fopendisk64_file_func; - pzlib_filefunc_def->zread_file = fread_file_func; - pzlib_filefunc_def->zwrite_file = fwrite_file_func; - pzlib_filefunc_def->ztell64_file = ftell64_file_func; - pzlib_filefunc_def->zseek64_file = fseek64_file_func; - pzlib_filefunc_def->zclose_file = fclose_file_func; - pzlib_filefunc_def->zerror_file = ferror_file_func; - pzlib_filefunc_def->opaque = NULL; 
-} diff --git a/Externals/minizip/ioapi.h b/Externals/minizip/ioapi.h deleted file mode 100644 index efb94a445791..000000000000 --- a/Externals/minizip/ioapi.h +++ /dev/null @@ -1,146 +0,0 @@ -/* ioapi.h -- IO base function header for compress/uncompress .zip - part of the MiniZip project - - Copyright (C) 1998-2010 Gilles Vollant - http://www.winimage.com/zLibDll/minizip.html - Modifications for Zip64 support - Copyright (C) 2009-2010 Mathias Svensson - http://result42.com - - This program is distributed under the terms of the same license as zlib. - See the accompanying LICENSE file for the full text of the license. -*/ - -#ifndef _ZLIBIOAPI64_H -#define _ZLIBIOAPI64_H - -#include -#include -#include - -#include "zlib.h" - -#if defined(USE_FILE32API) -# define fopen64 fopen -# define ftello64 ftell -# define fseeko64 fseek -#else -# if defined(__FreeBSD__) || defined(__NetBSD__) || defined(__DragonFly__) || defined(__OpenBSD__) || defined(__APPLE__) -# define fopen64 fopen -# define ftello64 ftello -# define fseeko64 fseeko -# endif -# ifdef _MSC_VER -# define fopen64 fopen -# if (_MSC_VER >= 1400) && (!(defined(NO_MSCVER_FILE64_FUNC))) -# define ftello64 _ftelli64 -# define fseeko64 _fseeki64 -# else /* old MSC */ -# define ftello64 ftell -# define fseeko64 fseek -# endif -# endif -#endif - -#ifdef __cplusplus -extern "C" { -#endif - -#define ZLIB_FILEFUNC_SEEK_CUR (1) -#define ZLIB_FILEFUNC_SEEK_END (2) -#define ZLIB_FILEFUNC_SEEK_SET (0) - -#define ZLIB_FILEFUNC_MODE_READ (1) -#define ZLIB_FILEFUNC_MODE_WRITE (2) -#define ZLIB_FILEFUNC_MODE_READWRITEFILTER (3) -#define ZLIB_FILEFUNC_MODE_EXISTING (4) -#define ZLIB_FILEFUNC_MODE_CREATE (8) - -#ifndef ZCALLBACK -# if (defined(WIN32) || defined(_WIN32) || defined (WINDOWS) || \ - defined (_WINDOWS)) && defined(CALLBACK) && defined (USEWINDOWS_CALLBACK) -# define ZCALLBACK CALLBACK -# else -# define ZCALLBACK -# endif -#endif - -typedef voidpf (ZCALLBACK *open_file_func) (voidpf opaque, const char *filename, int mode); -typedef voidpf (ZCALLBACK *opendisk_file_func) (voidpf opaque, voidpf stream, uint32_t number_disk, int mode); -typedef uint32_t (ZCALLBACK *read_file_func) (voidpf opaque, voidpf stream, void* buf, uint32_t size); -typedef uint32_t (ZCALLBACK *write_file_func) (voidpf opaque, voidpf stream, const void *buf, uint32_t size); -typedef int (ZCALLBACK *close_file_func) (voidpf opaque, voidpf stream); -typedef int (ZCALLBACK *error_file_func) (voidpf opaque, voidpf stream); - -typedef long (ZCALLBACK *tell_file_func) (voidpf opaque, voidpf stream); -typedef long (ZCALLBACK *seek_file_func) (voidpf opaque, voidpf stream, uint32_t offset, int origin); - -/* here is the "old" 32 bits structure structure */ -typedef struct zlib_filefunc_def_s -{ - open_file_func zopen_file; - opendisk_file_func zopendisk_file; - read_file_func zread_file; - write_file_func zwrite_file; - tell_file_func ztell_file; - seek_file_func zseek_file; - close_file_func zclose_file; - error_file_func zerror_file; - voidpf opaque; -} zlib_filefunc_def; - -typedef uint64_t (ZCALLBACK *tell64_file_func) (voidpf opaque, voidpf stream); -typedef long (ZCALLBACK *seek64_file_func) (voidpf opaque, voidpf stream, uint64_t offset, int origin); -typedef voidpf (ZCALLBACK *open64_file_func) (voidpf opaque, const void *filename, int mode); -typedef voidpf (ZCALLBACK *opendisk64_file_func)(voidpf opaque, voidpf stream, uint32_t number_disk, int mode); - -typedef struct zlib_filefunc64_def_s -{ - open64_file_func zopen64_file; - opendisk64_file_func zopendisk64_file; - 
read_file_func zread_file; - write_file_func zwrite_file; - tell64_file_func ztell64_file; - seek64_file_func zseek64_file; - close_file_func zclose_file; - error_file_func zerror_file; - voidpf opaque; -} zlib_filefunc64_def; - -void fill_fopen_filefunc(zlib_filefunc_def *pzlib_filefunc_def); -void fill_fopen64_filefunc(zlib_filefunc64_def *pzlib_filefunc_def); - -/* now internal definition, only for zip.c and unzip.h */ -typedef struct zlib_filefunc64_32_def_s -{ - zlib_filefunc64_def zfile_func64; - open_file_func zopen32_file; - opendisk_file_func zopendisk32_file; - tell_file_func ztell32_file; - seek_file_func zseek32_file; -} zlib_filefunc64_32_def; - -#define ZREAD64(filefunc,filestream,buf,size) ((*((filefunc).zfile_func64.zread_file)) ((filefunc).zfile_func64.opaque,filestream,buf,size)) -#define ZWRITE64(filefunc,filestream,buf,size) ((*((filefunc).zfile_func64.zwrite_file)) ((filefunc).zfile_func64.opaque,filestream,buf,size)) -/*#define ZTELL64(filefunc,filestream) ((*((filefunc).ztell64_file)) ((filefunc).opaque,filestream))*/ -/*#define ZSEEK64(filefunc,filestream,pos,mode) ((*((filefunc).zseek64_file)) ((filefunc).opaque,filestream,pos,mode))*/ -#define ZCLOSE64(filefunc,filestream) ((*((filefunc).zfile_func64.zclose_file)) ((filefunc).zfile_func64.opaque,filestream)) -#define ZERROR64(filefunc,filestream) ((*((filefunc).zfile_func64.zerror_file)) ((filefunc).zfile_func64.opaque,filestream)) - -voidpf call_zopen64(const zlib_filefunc64_32_def *pfilefunc,const void*filename, int mode); -voidpf call_zopendisk64(const zlib_filefunc64_32_def *pfilefunc, voidpf filestream, uint32_t number_disk, int mode); -long call_zseek64(const zlib_filefunc64_32_def *pfilefunc, voidpf filestream, uint64_t offset, int origin); -uint64_t call_ztell64(const zlib_filefunc64_32_def *pfilefunc, voidpf filestream); - -void fill_zlib_filefunc64_32_def_from_filefunc32(zlib_filefunc64_32_def *p_filefunc64_32, const zlib_filefunc_def *p_filefunc32); - -#define ZOPEN64(filefunc,filename,mode) (call_zopen64((&(filefunc)),(filename),(mode))) -#define ZOPENDISK64(filefunc,filestream,diskn,mode) (call_zopendisk64((&(filefunc)),(filestream),(diskn),(mode))) -#define ZTELL64(filefunc,filestream) (call_ztell64((&(filefunc)),(filestream))) -#define ZSEEK64(filefunc,filestream,pos,mode) (call_zseek64((&(filefunc)),(filestream),(pos),(mode))) - -#ifdef __cplusplus -} -#endif - -#endif diff --git a/Externals/minizip/minigzip.c b/Externals/minizip/minigzip.c new file mode 100644 index 000000000000..0bd8c7989572 --- /dev/null +++ b/Externals/minizip/minigzip.c @@ -0,0 +1,182 @@ +/* minigzip.c + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/
+
+
+#include "mz.h"
+#include "mz_os.h"
+#include "mz_strm.h"
+#include "mz_strm_os.h"
+#include "mz_strm_zlib.h"
+
+#include <stdio.h> /* printf */
+
+/***************************************************************************/
+
+#define MZ_GZIP_COMPRESS (1)
+#define MZ_GZIP_DECOMPRESS (2)
+
+int32_t minigzip_banner(void);
+int32_t minigzip_help(void);
+
+/***************************************************************************/
+
+int32_t minigzip_banner(void) {
+    printf("Minigzip %s - https://github.com/zlib-ng/minizip-ng\n", MZ_VERSION);
+    printf("---------------------------------------------------\n");
+    return MZ_OK;
+}
+
+int32_t minigzip_help(void) {
+    printf("Usage: minigzip [-x] [-d] [-0 to -9] [files]\n\n" \
+           " -x Extract file\n" \
+           " -d Destination directory\n" \
+           " -0 Store only\n" \
+           " -1 Compress faster\n" \
+           " -9 Compress better\n\n");
+    return MZ_OK;
+}
+
+/***************************************************************************/
+
+int32_t minigzip_copy(const char *path, const char *destination, int16_t operation, int16_t level) {
+    void *target_stream = NULL;
+    void *source_stream = NULL;
+    void *zlib_stream = NULL;
+    const char *filename = NULL;
+    char target_path[1024];
+    int32_t err = 0;
+
+
+    memset(target_path, 0, sizeof(target_path));
+
+    if (destination != NULL) {
+        if (mz_os_file_exists(destination) != MZ_OK)
+            mz_dir_make(destination);
+    }
+
+    if (operation == MZ_GZIP_COMPRESS) {
+        mz_path_combine(target_path, path, sizeof(target_path));
+        strncat(target_path, ".gz", sizeof(target_path) - strlen(target_path) - 1);
+        printf("Compressing to %s\n", target_path);
+    } else if (operation == MZ_GZIP_DECOMPRESS) {
+        if (destination != NULL)
+            mz_path_combine(target_path, destination, sizeof(target_path));
+
+        if (mz_path_get_filename(path, &filename) != MZ_OK)
+            filename = path;
+
+        mz_path_combine(target_path, filename, sizeof(target_path));
+        mz_path_remove_extension(target_path);
+        printf("Decompressing to %s\n", target_path);
+    }
+
+    mz_stream_zlib_create(&zlib_stream);
+    mz_stream_zlib_set_prop_int64(zlib_stream, MZ_STREAM_PROP_COMPRESS_WINDOW, 15 + 16);
+
+    mz_stream_os_create(&source_stream);
+    err = mz_stream_os_open(source_stream, path, MZ_OPEN_MODE_READ);
+
+    if (err == MZ_OK) {
+        mz_stream_os_create(&target_stream);
+        err = mz_stream_os_open(target_stream, target_path, MZ_OPEN_MODE_CREATE | MZ_OPEN_MODE_WRITE);
+
+        if (err == MZ_OK) {
+            if (operation == MZ_GZIP_COMPRESS) {
+                mz_stream_zlib_set_prop_int64(zlib_stream, MZ_STREAM_PROP_COMPRESS_LEVEL, level);
+                mz_stream_zlib_open(zlib_stream, target_path, MZ_OPEN_MODE_WRITE);
+                mz_stream_set_base(zlib_stream, target_stream);
+                err = mz_stream_copy_to_end(zlib_stream, source_stream);
+            } else if (operation == MZ_GZIP_DECOMPRESS) {
+                mz_stream_zlib_open(zlib_stream, path, MZ_OPEN_MODE_READ);
+                mz_stream_set_base(zlib_stream, source_stream);
+                err = mz_stream_copy_to_end(target_stream, zlib_stream);
+            }
+
+            if (err != MZ_OK)
+                printf("Error %d in zlib stream (%d)\n", err, mz_stream_zlib_error(zlib_stream));
+            else
+                printf("Operation completed successfully\n");
+
+            mz_stream_zlib_close(zlib_stream);
+        } else {
+            printf("Error %d opening target path %s\n", err, target_path);
+        }
+
+        mz_stream_os_close(target_stream);
+        mz_stream_os_delete(&target_stream);
+    } else {
+        printf("Error %d opening source path %s\n", err, path);
+    }
+
+    mz_stream_os_close(source_stream);
+    mz_stream_os_delete(&source_stream);
+
+    mz_stream_zlib_delete(&zlib_stream);
+    return err;
+}
+
+/***************************************************************************/ + +#if !defined(MZ_ZIP_NO_MAIN) +int main(int argc, const char *argv[]) { + int16_t operation_level = MZ_COMPRESS_LEVEL_DEFAULT; + int32_t path_arg = 0; + int32_t err = 0; + int32_t i = 0; + uint8_t operation = MZ_GZIP_COMPRESS; + const char *path = NULL; + const char *destination = NULL; + + + minigzip_banner(); + if (argc == 1) { + minigzip_help(); + return 0; + } + + /* Parse command line options */ + for (i = 1; i < argc; i += 1) { + printf("%s ", argv[i]); + if (argv[i][0] == '-') { + char c = argv[i][1]; + if ((c == 'x') || (c == 'X')) + operation = MZ_GZIP_DECOMPRESS; + else if ((c >= '0') && (c <= '9')) + operation_level = (c - '0'); + else if (((c == 'd') || (c == 'D')) && (i + 1 < argc)) { + destination = argv[i + 1]; + printf("%s ", argv[i + 1]); + i += 1; + } else { + err = MZ_SUPPORT_ERROR; + } + } else if (path_arg == 0) { + path_arg = i; + break; + } + } + printf("\n"); + + if (err == MZ_SUPPORT_ERROR) { + printf("Feature not supported\n"); + return err; + } + + if (path_arg == 0) { + minigzip_help(); + return 0; + } + + path = argv[path_arg]; + err = minigzip_copy(path, destination, operation, operation_level); + + return err; +} +#endif diff --git a/Externals/minizip/minizip.c b/Externals/minizip/minizip.c new file mode 100644 index 000000000000..01bb559eb8cb --- /dev/null +++ b/Externals/minizip/minizip.c @@ -0,0 +1,671 @@ +/* minizip.c + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2010 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/
+
+
+#include "mz.h"
+#include "mz_os.h"
+#include "mz_strm.h"
+#include "mz_strm_buf.h"
+#include "mz_strm_split.h"
+#include "mz_zip.h"
+#include "mz_zip_rw.h"
+
+#include <stdio.h> /* printf */
+
+/***************************************************************************/
+
+typedef struct minizip_opt_s {
+    uint8_t     include_path;
+    int16_t     compress_level;
+    uint8_t     compress_method;
+    uint8_t     overwrite;
+    uint8_t     append;
+    int64_t     disk_size;
+    uint8_t     follow_links;
+    uint8_t     store_links;
+    uint8_t     zip_cd;
+    int32_t     encoding;
+    uint8_t     verbose;
+    uint8_t     aes;
+    const char *cert_path;
+    const char *cert_pwd;
+} minizip_opt;
+
+/***************************************************************************/
+
+int32_t minizip_banner(void);
+int32_t minizip_help(void);
+
+int32_t minizip_list(const char *path);
+
+int32_t minizip_add_entry_cb(void *handle, void *userdata, mz_zip_file *file_info);
+int32_t minizip_add_progress_cb(void *handle, void *userdata, mz_zip_file *file_info, int64_t position);
+int32_t minizip_add_overwrite_cb(void *handle, void *userdata, const char *path);
+int32_t minizip_add(const char *path, const char *password, minizip_opt *options, int32_t arg_count, const char **args);
+
+int32_t minizip_extract_entry_cb(void *handle, void *userdata, mz_zip_file *file_info, const char *path);
+int32_t minizip_extract_progress_cb(void *handle, void *userdata, mz_zip_file *file_info, int64_t position);
+int32_t minizip_extract_overwrite_cb(void *handle, void *userdata, mz_zip_file *file_info, const char *path);
+int32_t minizip_extract(const char *path, const char *pattern, const char *destination, const char *password, minizip_opt *options);
+
+int32_t minizip_erase(const char *src_path, const char *target_path, int32_t arg_count, const char **args);
+
+/***************************************************************************/
+
+int32_t minizip_banner(void) {
+    printf("minizip-ng %s - https://github.com/zlib-ng/minizip-ng\n", MZ_VERSION);
+    printf("---------------------------------------------------\n");
+    return MZ_OK;
+}
+
+int32_t minizip_help(void) {
+    printf("Usage: minizip [-x][-d dir|-l|-e][-o][-f][-y][-c cp][-a][-0 to -9][-b|-m|-t][-k 512][-p pwd][-s] file.zip [files]\n\n" \
+           " -x Extract files\n" \
+           " -l List files\n" \
+           " -d Destination directory\n" \
+           " -e Erase files\n" \
+           " -o Overwrite existing files\n" \
+           " -c File names use cp437 encoding (or specified codepage)\n" \
+           " -a Append to existing zip file\n" \
+           " -i Include full path of files\n" \
+           " -f Follow symbolic links\n" \
+           " -y Store symbolic links\n" \
+           " -v Verbose info\n" \
+           " -0 Store only\n" \
+           " -1 Compress faster\n" \
+           " -9 Compress better\n" \
+           " -k Disk size in KB\n" \
+           " -z Zip central directory\n" \
+           " -p Encryption password\n" \
+           " -s AES encryption\n" \
+           " -h PKCS12 certificate path\n" \
+           " -w PKCS12 certificate password\n" \
+           " -b BZIP2 compression\n" \
+           " -m LZMA compression\n" \
+           " -n XZ compression\n" \
+           " -t ZSTD compression\n\n");
+    return MZ_OK;
+}
+
+/***************************************************************************/
+
+int32_t minizip_list(const char *path) {
+    mz_zip_file *file_info = NULL;
+    uint32_t ratio = 0;
+    int32_t err = MZ_OK;
+    struct tm tmu_date;
+    const char *method = NULL;
+    char crypt = ' ';
+    void *reader = NULL;
+
+
+    mz_zip_reader_create(&reader);
+    err = mz_zip_reader_open_file(reader, path);
+    if (err != MZ_OK) {
+        printf("Error %" PRId32 " opening archive %s\n", err, path);
+        mz_zip_reader_delete(&reader);
+        return err;
+    }
+
+    err =
mz_zip_reader_goto_first_entry(reader); + + if (err != MZ_OK && err != MZ_END_OF_LIST) { + printf("Error %" PRId32 " going to first entry in archive\n", err); + mz_zip_reader_delete(&reader); + return err; + } + + printf(" Packed Unpacked Ratio Method Attribs Date Time CRC-32 Name\n"); + printf(" ------ -------- ----- ------ ------- ---- ---- ------ ----\n"); + + /* Enumerate all entries in the archive */ + do { + err = mz_zip_reader_entry_get_info(reader, &file_info); + + if (err != MZ_OK) { + printf("Error %" PRId32 " getting entry info in archive\n", err); + break; + } + + ratio = 0; + if (file_info->uncompressed_size > 0) + ratio = (uint32_t)((file_info->compressed_size * 100) / file_info->uncompressed_size); + + /* Display a '*' if the file is encrypted */ + if (file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) + crypt = '*'; + else + crypt = ' '; + + method = mz_zip_get_compression_method_string(file_info->compression_method); + mz_zip_time_t_to_tm(file_info->modified_date, &tmu_date); + + /* Print entry information */ + printf("%12" PRId64 " %12" PRId64 " %3" PRIu32 "%% %6s%c %8" PRIx32 " %2.2" PRIu32 \ + "-%2.2" PRIu32 "-%2.2" PRIu32 " %2.2" PRIu32 ":%2.2" PRIu32 " %8.8" PRIx32 " %s\n", + file_info->compressed_size, file_info->uncompressed_size, ratio, + method, crypt, file_info->external_fa, + (uint32_t)tmu_date.tm_mon + 1, (uint32_t)tmu_date.tm_mday, + (uint32_t)tmu_date.tm_year % 100, + (uint32_t)tmu_date.tm_hour, (uint32_t)tmu_date.tm_min, + file_info->crc, file_info->filename); + + err = mz_zip_reader_goto_next_entry(reader); + + if (err != MZ_OK && err != MZ_END_OF_LIST) { + printf("Error %" PRId32 " going to next entry in archive\n", err); + break; + } + } while (err == MZ_OK); + + mz_zip_reader_delete(&reader); + + if (err == MZ_END_OF_LIST) + return MZ_OK; + + return err; +} + +/***************************************************************************/ + +int32_t minizip_add_entry_cb(void *handle, void *userdata, mz_zip_file *file_info) { + MZ_UNUSED(handle); + MZ_UNUSED(userdata); + + /* Print the current file we are trying to compress */ + printf("Adding %s\n", file_info->filename); + return MZ_OK; +} + +int32_t minizip_add_progress_cb(void *handle, void *userdata, mz_zip_file *file_info, int64_t position) { + minizip_opt *options = (minizip_opt *)userdata; + double progress = 0; + uint8_t raw = 0; + + MZ_UNUSED(userdata); + + mz_zip_writer_get_raw(handle, &raw); + + if (raw && file_info->compressed_size > 0) + progress = ((double)position / file_info->compressed_size) * 100; + else if (!raw && file_info->uncompressed_size > 0) + progress = ((double)position / file_info->uncompressed_size) * 100; + + /* Print the progress of the current compress operation */ + if (options->verbose) + printf("%s - %" PRId64 " / %" PRId64 " (%.02f%%)\n", file_info->filename, position, + file_info->uncompressed_size, progress); + return MZ_OK; +} + +int32_t minizip_add_overwrite_cb(void *handle, void *userdata, const char *path) { + minizip_opt *options = (minizip_opt *)userdata; + + MZ_UNUSED(handle); + + if (options->overwrite == 0) { + /* If ask the user what to do because append and overwrite args not set */ + char rep = 0; + do { + char answer[128]; + printf("The file %s exists. Overwrite ? 
[y]es, [n]o, [a]ppend : ", path); + if (scanf("%1s", answer) != 1) + exit(EXIT_FAILURE); + rep = answer[0]; + + if ((rep >= 'a') && (rep <= 'z')) + rep -= 0x20; + } while ((rep != 'Y') && (rep != 'N') && (rep != 'A')); + + if (rep == 'A') { + return MZ_EXIST_ERROR; + } else if (rep == 'N') { + return MZ_INTERNAL_ERROR; + } + } + + return MZ_OK; +} + +int32_t minizip_add(const char *path, const char *password, minizip_opt *options, int32_t arg_count, const char **args) { + void *writer = NULL; + int32_t err = MZ_OK; + int32_t err_close = MZ_OK; + int32_t i = 0; + const char *filename_in_zip = NULL; + + + printf("Archive %s\n", path); + + /* Create zip writer */ + mz_zip_writer_create(&writer); + mz_zip_writer_set_password(writer, password); + mz_zip_writer_set_aes(writer, options->aes); + mz_zip_writer_set_compress_method(writer, options->compress_method); + mz_zip_writer_set_compress_level(writer, options->compress_level); + mz_zip_writer_set_follow_links(writer, options->follow_links); + mz_zip_writer_set_store_links(writer, options->store_links); + mz_zip_writer_set_overwrite_cb(writer, options, minizip_add_overwrite_cb); + mz_zip_writer_set_progress_cb(writer, options, minizip_add_progress_cb); + mz_zip_writer_set_entry_cb(writer, options, minizip_add_entry_cb); + mz_zip_writer_set_zip_cd(writer, options->zip_cd); + if (options->cert_path != NULL) + mz_zip_writer_set_certificate(writer, options->cert_path, options->cert_pwd); + + err = mz_zip_writer_open_file(writer, path, options->disk_size, options->append); + + if (err == MZ_OK) { + for (i = 0; i < arg_count; i += 1) { + filename_in_zip = args[i]; + + /* Add file system path to archive */ + err = mz_zip_writer_add_path(writer, filename_in_zip, NULL, options->include_path, 1); + if (err != MZ_OK) + printf("Error %" PRId32 " adding path to archive %s\n", err, filename_in_zip); + } + } else { + printf("Error %" PRId32 " opening archive for writing\n", err); + } + + err_close = mz_zip_writer_close(writer); + if (err_close != MZ_OK) { + printf("Error %" PRId32 " closing archive for writing %s\n", err_close, path); + err = err_close; + } + + mz_zip_writer_delete(&writer); + return err; +} + +/***************************************************************************/ + +int32_t minizip_extract_entry_cb(void *handle, void *userdata, mz_zip_file *file_info, const char *path) { + MZ_UNUSED(handle); + MZ_UNUSED(userdata); + MZ_UNUSED(path); + + /* Print the current entry extracting */ + printf("Extracting %s\n", file_info->filename); + return MZ_OK; +} + +int32_t minizip_extract_progress_cb(void *handle, void *userdata, mz_zip_file *file_info, int64_t position) { + minizip_opt *options = (minizip_opt *)userdata; + double progress = 0; + uint8_t raw = 0; + + MZ_UNUSED(userdata); + + mz_zip_reader_get_raw(handle, &raw); + + if (raw && file_info->compressed_size > 0) + progress = ((double)position / file_info->compressed_size) * 100; + else if (!raw && file_info->uncompressed_size > 0) + progress = ((double)position / file_info->uncompressed_size) * 100; + + /* Print the progress of the current extraction */ + if (options->verbose) + printf("%s - %" PRId64 " / %" PRId64 " (%.02f%%)\n", file_info->filename, position, + file_info->uncompressed_size, progress); + + return MZ_OK; +} + +int32_t minizip_extract_overwrite_cb(void *handle, void *userdata, mz_zip_file *file_info, const char *path) { + minizip_opt *options = (minizip_opt *)userdata; + + MZ_UNUSED(handle); + MZ_UNUSED(file_info); + + /* Verify if we want to overwrite current entry on disk 
*/ + if (options->overwrite == 0) { + char rep = 0; + do { + char answer[128]; + printf("The file %s exists. Overwrite ? [y]es, [n]o, [A]ll: ", path); + if (scanf("%1s", answer) != 1) + exit(EXIT_FAILURE); + rep = answer[0]; + if ((rep >= 'a') && (rep <= 'z')) + rep -= 0x20; + } while ((rep != 'Y') && (rep != 'N') && (rep != 'A')); + + if (rep == 'N') + return MZ_EXIST_ERROR; + if (rep == 'A') + options->overwrite = 1; + } + + return MZ_OK; +} + +int32_t minizip_extract(const char *path, const char *pattern, const char *destination, const char *password, minizip_opt *options) { + void *reader = NULL; + int32_t err = MZ_OK; + int32_t err_close = MZ_OK; + + + printf("Archive %s\n", path); + + /* Create zip reader */ + mz_zip_reader_create(&reader); + mz_zip_reader_set_pattern(reader, pattern, 1); + mz_zip_reader_set_password(reader, password); + mz_zip_reader_set_encoding(reader, options->encoding); + mz_zip_reader_set_entry_cb(reader, options, minizip_extract_entry_cb); + mz_zip_reader_set_progress_cb(reader, options, minizip_extract_progress_cb); + mz_zip_reader_set_overwrite_cb(reader, options, minizip_extract_overwrite_cb); + + err = mz_zip_reader_open_file(reader, path); + + if (err != MZ_OK) { + printf("Error %" PRId32 " opening archive %s\n", err, path); + } else { + /* Save all entries in archive to destination directory */ + err = mz_zip_reader_save_all(reader, destination); + + if (err == MZ_END_OF_LIST) { + if (pattern != NULL) { + printf("Files matching %s not found in archive\n", pattern); + } else { + printf("No files in archive\n"); + err = MZ_OK; + } + } else if (err != MZ_OK) { + printf("Error %" PRId32 " saving entries to disk %s\n", err, path); + } + } + + err_close = mz_zip_reader_close(reader); + if (err_close != MZ_OK) { + printf("Error %" PRId32 " closing archive for reading\n", err_close); + err = err_close; + } + + mz_zip_reader_delete(&reader); + return err; +} + +/***************************************************************************/ + +int32_t minizip_erase(const char *src_path, const char *target_path, int32_t arg_count, const char **args) { + mz_zip_file *file_info = NULL; + const char *filename_in_zip = NULL; + const char *target_path_ptr = target_path; + void *reader = NULL; + void *writer = NULL; + int32_t skip = 0; + int32_t err = MZ_OK; + int32_t i = 0; + uint8_t zip_cd = 0; + char bak_path[256]; + char tmp_path[256]; + + if (target_path == NULL) { + /* Construct temporary zip name */ + strncpy(tmp_path, src_path, sizeof(tmp_path) - 1); + tmp_path[sizeof(tmp_path) - 1] = 0; + strncat(tmp_path, ".tmp.zip", sizeof(tmp_path) - strlen(tmp_path) - 1); + target_path_ptr = tmp_path; + } + + mz_zip_reader_create(&reader); + mz_zip_writer_create(&writer); + + /* Open original archive we want to erase an entry in */ + err = mz_zip_reader_open_file(reader, src_path); + if (err != MZ_OK) { + printf("Error %" PRId32 " opening archive for reading %s\n", err, src_path); + mz_zip_reader_delete(&reader); + return err; + } + + /* Open temporary archive */ + err = mz_zip_writer_open_file(writer, target_path_ptr, 0, 0); + if (err != MZ_OK) { + printf("Error %" PRId32 " opening archive for writing %s\n", err, target_path_ptr); + mz_zip_reader_delete(&reader); + mz_zip_writer_delete(&writer); + return err; + } + + err = mz_zip_reader_goto_first_entry(reader); + + if (err != MZ_OK && err != MZ_END_OF_LIST) + printf("Error %" PRId32 " going to first entry in archive\n", err); + + while (err == MZ_OK) { + err = mz_zip_reader_entry_get_info(reader, &file_info); + if (err != 
MZ_OK) { + printf("Error %" PRId32 " getting info from archive\n", err); + break; + } + + /* Copy all entries from original archive to temporary archive + except the ones we don't want */ + for (i = 0, skip = 0; i < arg_count; i += 1) { + filename_in_zip = args[i]; + + if (mz_path_compare_wc(file_info->filename, filename_in_zip, 1) == MZ_OK) + skip = 1; + } + + if (skip) { + printf("Skipping %s\n", file_info->filename); + } else { + printf("Copying %s\n", file_info->filename); + err = mz_zip_writer_copy_from_reader(writer, reader); + } + + if (err != MZ_OK) { + printf("Error %" PRId32 " copying entry into new zip\n", err); + break; + } + + err = mz_zip_reader_goto_next_entry(reader); + + if (err != MZ_OK && err != MZ_END_OF_LIST) + printf("Error %" PRId32 " going to next entry in archive\n", err); + } + + mz_zip_reader_get_zip_cd(reader, &zip_cd); + mz_zip_writer_set_zip_cd(writer, zip_cd); + + mz_zip_reader_close(reader); + mz_zip_reader_delete(&reader); + + mz_zip_writer_close(writer); + mz_zip_writer_delete(&writer); + + if (err == MZ_END_OF_LIST) { + if (target_path == NULL) { + /* Swap original archive with temporary archive, backup old archive if possible */ + strncpy(bak_path, src_path, sizeof(bak_path) - 1); + bak_path[sizeof(bak_path) - 1] = 0; + strncat(bak_path, ".bak", sizeof(bak_path) - strlen(bak_path) - 1); + + if (mz_os_file_exists(bak_path) == MZ_OK) + mz_os_unlink(bak_path); + + if (mz_os_rename(src_path, bak_path) != MZ_OK) + printf("Error backing up archive before replacing %s\n", bak_path); + + if (mz_os_rename(tmp_path, src_path) != MZ_OK) + printf("Error replacing archive with temp %s\n", tmp_path); + } + + return MZ_OK; + } + + return err; +} + +/***************************************************************************/ + +#if !defined(MZ_ZIP_NO_MAIN) +int main(int argc, const char *argv[]) { + minizip_opt options; + int32_t path_arg = 0; + int32_t err = 0; + int32_t i = 0; + uint8_t do_list = 0; + uint8_t do_extract = 0; + uint8_t do_erase = 0; + const char *path = NULL; + const char *password = NULL; + const char *destination = NULL; + const char *filename_to_extract = NULL; + + + minizip_banner(); + if (argc == 1) { + minizip_help(); + return 0; + } + + memset(&options, 0, sizeof(options)); + + options.compress_method = MZ_COMPRESS_METHOD_DEFLATE; + options.compress_level = MZ_COMPRESS_LEVEL_DEFAULT; + + /* Parse command line options */ + for (i = 1; i < argc; i += 1) { + printf("%s ", argv[i]); + if (argv[i][0] == '-') { + char c = argv[i][1]; + if ((c == 'l') || (c == 'L')) + do_list = 1; + else if ((c == 'x') || (c == 'X')) + do_extract = 1; + else if ((c == 'e') || (c == 'E')) + do_erase = 1; + else if ((c == 'a') || (c == 'A')) + options.append = 1; + else if ((c == 'o') || (c == 'O')) + options.overwrite = 1; + else if ((c == 'f') || (c == 'F')) + options.follow_links = 1; + else if ((c == 'y') || (c == 'Y')) + options.store_links = 1; + else if ((c == 'i') || (c == 'I')) + options.include_path = 1; + else if ((c == 'z') || (c == 'Z')) + options.zip_cd = 1; + else if ((c == 'v') || (c == 'V')) + options.verbose = 1; + else if ((c >= '0') && (c <= '9')) { + options.compress_level = (c - '0'); + if (options.compress_level == 0) + options.compress_method = MZ_COMPRESS_METHOD_STORE; + } else if ((c == 'b') || (c == 'B')) +#ifdef HAVE_BZIP2 + options.compress_method = MZ_COMPRESS_METHOD_BZIP2; +#else + err = MZ_SUPPORT_ERROR; +#endif + else if ((c == 'm') || (c == 'M')) +#ifdef HAVE_LZMA + options.compress_method = MZ_COMPRESS_METHOD_LZMA; +#else + err = 
MZ_SUPPORT_ERROR; +#endif + else if ((c == 'n') || (c == 'N')) +#if defined(HAVE_LZMA) || defined(HAVE_LIBCOMP) + options.compress_method = MZ_COMPRESS_METHOD_XZ; +#else + err = MZ_SUPPORT_ERROR; +#endif + else if ((c == 't') || (c == 'T')) +#ifdef HAVE_ZSTD + options.compress_method = MZ_COMPRESS_METHOD_ZSTD; +#else + err = MZ_SUPPORT_ERROR; +#endif + else if ((c == 's') || (c == 'S')) +#ifdef HAVE_WZAES + options.aes = 1; +#else + err = MZ_SUPPORT_ERROR; +#endif + else if (((c == 'h') || (c == 'H')) && (i + 1 < argc)) { +#ifdef MZ_ZIP_SIGNING + options.cert_path = argv[i + 1]; + printf("%s ", argv[i + 1]); +#else + err = MZ_SUPPORT_ERROR; +#endif + i += 1; + } else if (((c == 'w') || (c == 'W')) && (i + 1 < argc)) { +#ifdef MZ_ZIP_SIGNING + options.cert_pwd = argv[i + 1]; + printf("%s ", argv[i + 1]); +#else + err = MZ_SUPPORT_ERROR; +#endif + i += 1; + } else if (((c == 'c') || (c == 'C')) && (i + 1 < argc)) { + options.encoding = (int32_t)atoi(argv[i + 1]); + i += 1; + } else if (((c == 'k') || (c == 'K')) && (i + 1 < argc)) { + options.disk_size = (int64_t)atoi(argv[i + 1]) * 1024; + printf("%s ", argv[i + 1]); + i += 1; + } else if (((c == 'd') || (c == 'D')) && (i + 1 < argc)) { + destination = argv[i + 1]; + printf("%s ", argv[i + 1]); + i += 1; + } else if (((c == 'p') || (c == 'P')) && (i + 1 < argc)) { +#ifndef MZ_ZIP_NO_ENCRYPTION + password = argv[i + 1]; + printf("*** "); +#else + err = MZ_SUPPORT_ERROR; +#endif + i += 1; + } + } else if (path_arg == 0) + path_arg = i; + } + printf("\n"); + + if (err == MZ_SUPPORT_ERROR) { + printf("Feature not supported\n"); + return err; + } + + if (path_arg == 0) { + minizip_help(); + return 0; + } + + path = argv[path_arg]; + + if (do_list) { + /* List archive contents */ + err = minizip_list(path); + } else if (do_extract) { + if (argc > path_arg + 1) + filename_to_extract = argv[path_arg + 1]; + + /* Extract archive */ + err = minizip_extract(path, filename_to_extract, destination, password, &options); + } else if (do_erase) { + /* Erase file in archive */ + err = minizip_erase(path, NULL, argc - (path_arg + 1), &argv[path_arg + 1]); + } else { + /* Add files to archive */ + err = minizip_add(path, password, &options, argc - (path_arg + 1), &argv[path_arg + 1]); + } + + return err; +} +#endif diff --git a/Externals/minizip/minizip.pc.cmakein b/Externals/minizip/minizip.pc.cmakein new file mode 100644 index 000000000000..d965a346c590 --- /dev/null +++ b/Externals/minizip/minizip.pc.cmakein @@ -0,0 +1,14 @@ +prefix=@CMAKE_INSTALL_PREFIX@ +exec_prefix=@CMAKE_INSTALL_PREFIX@ +libdir=@CMAKE_INSTALL_FULL_LIBDIR@ +sharedlibdir=@CMAKE_INSTALL_FULL_LIBDIR@ +includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@ + +Name: @PROJECT_NAME@ +Description: Minizip zip file manipulation library +Version: @VERSION@ + +Requires: zlib +Libs: -L${libdir} -L${sharedlibdir} -l@PROJECT_NAME@ +Libs.private:@PC_PRIVATE_LIBS@ +Cflags: -I${includedir} diff --git a/Externals/minizip/minizip.vcxproj b/Externals/minizip/minizip.vcxproj index fe499a08251e..e454f85ed899 100644 --- a/Externals/minizip/minizip.vcxproj +++ b/Externals/minizip/minizip.vcxproj @@ -17,19 +17,41 @@ - NOUNCRYPT;%(PreprocessorDefinitions) + HAVE_ZLIB;ZLIB_COMPAT;MZ_ZIP_NO_CRYPTO;MZ_ZIP_NO_ENCRYPTION;HAVE_STDINT_H;HAVE_INTTYPES_H;NO_FSEEKO;%(PreprocessorDefinitions) - - + + + + + + + + + + + + - + + + + + + + + + + + + + diff --git a/Externals/minizip/mz.h b/Externals/minizip/mz.h new file mode 100644 index 000000000000..82a14a23e0e2 --- /dev/null +++ b/Externals/minizip/mz.h @@ -0,0 +1,274 @@ +/* mz.h -- 
Errors codes, zip flags and magic + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_H +#define MZ_H + +/***************************************************************************/ + +/* MZ_VERSION */ +#define MZ_VERSION ("3.0.4") +#define MZ_VERSION_BUILD (030004) + +/* MZ_ERROR */ +#define MZ_OK (0) /* zlib */ +#define MZ_STREAM_ERROR (-1) /* zlib */ +#define MZ_DATA_ERROR (-3) /* zlib */ +#define MZ_MEM_ERROR (-4) /* zlib */ +#define MZ_BUF_ERROR (-5) /* zlib */ +#define MZ_VERSION_ERROR (-6) /* zlib */ + +#define MZ_END_OF_LIST (-100) +#define MZ_END_OF_STREAM (-101) + +#define MZ_PARAM_ERROR (-102) +#define MZ_FORMAT_ERROR (-103) +#define MZ_INTERNAL_ERROR (-104) +#define MZ_CRC_ERROR (-105) +#define MZ_CRYPT_ERROR (-106) +#define MZ_EXIST_ERROR (-107) +#define MZ_PASSWORD_ERROR (-108) +#define MZ_SUPPORT_ERROR (-109) +#define MZ_HASH_ERROR (-110) +#define MZ_OPEN_ERROR (-111) +#define MZ_CLOSE_ERROR (-112) +#define MZ_SEEK_ERROR (-113) +#define MZ_TELL_ERROR (-114) +#define MZ_READ_ERROR (-115) +#define MZ_WRITE_ERROR (-116) +#define MZ_SIGN_ERROR (-117) +#define MZ_SYMLINK_ERROR (-118) + +/* MZ_OPEN */ +#define MZ_OPEN_MODE_READ (0x01) +#define MZ_OPEN_MODE_WRITE (0x02) +#define MZ_OPEN_MODE_READWRITE (MZ_OPEN_MODE_READ | MZ_OPEN_MODE_WRITE) +#define MZ_OPEN_MODE_APPEND (0x04) +#define MZ_OPEN_MODE_CREATE (0x08) +#define MZ_OPEN_MODE_EXISTING (0x10) + +/* MZ_SEEK */ +#define MZ_SEEK_SET (0) +#define MZ_SEEK_CUR (1) +#define MZ_SEEK_END (2) + +/* MZ_COMPRESS */ +#define MZ_COMPRESS_METHOD_STORE (0) +#define MZ_COMPRESS_METHOD_DEFLATE (8) +#define MZ_COMPRESS_METHOD_BZIP2 (12) +#define MZ_COMPRESS_METHOD_LZMA (14) +#define MZ_COMPRESS_METHOD_ZSTD (93) +#define MZ_COMPRESS_METHOD_XZ (95) +#define MZ_COMPRESS_METHOD_AES (99) + +#define MZ_COMPRESS_LEVEL_DEFAULT (-1) +#define MZ_COMPRESS_LEVEL_FAST (2) +#define MZ_COMPRESS_LEVEL_NORMAL (6) +#define MZ_COMPRESS_LEVEL_BEST (9) + +/* MZ_ZIP_FLAG */ +#define MZ_ZIP_FLAG_ENCRYPTED (1 << 0) +#define MZ_ZIP_FLAG_LZMA_EOS_MARKER (1 << 1) +#define MZ_ZIP_FLAG_DEFLATE_MAX (1 << 1) +#define MZ_ZIP_FLAG_DEFLATE_NORMAL (0) +#define MZ_ZIP_FLAG_DEFLATE_FAST (1 << 2) +#define MZ_ZIP_FLAG_DEFLATE_SUPER_FAST (MZ_ZIP_FLAG_DEFLATE_FAST | \ + MZ_ZIP_FLAG_DEFLATE_MAX) +#define MZ_ZIP_FLAG_DATA_DESCRIPTOR (1 << 3) +#define MZ_ZIP_FLAG_UTF8 (1 << 11) +#define MZ_ZIP_FLAG_MASK_LOCAL_INFO (1 << 13) + +/* MZ_ZIP_EXTENSION */ +#define MZ_ZIP_EXTENSION_ZIP64 (0x0001) +#define MZ_ZIP_EXTENSION_NTFS (0x000a) +#define MZ_ZIP_EXTENSION_AES (0x9901) +#define MZ_ZIP_EXTENSION_UNIX1 (0x000d) +#define MZ_ZIP_EXTENSION_SIGN (0x10c5) +#define MZ_ZIP_EXTENSION_HASH (0x1a51) +#define MZ_ZIP_EXTENSION_CDCD (0xcdcd) + +/* MZ_ZIP64 */ +#define MZ_ZIP64_AUTO (0) +#define MZ_ZIP64_FORCE (1) +#define MZ_ZIP64_DISABLE (2) + +/* MZ_HOST_SYSTEM */ +#define MZ_HOST_SYSTEM(VERSION_MADEBY) ((uint8_t)(VERSION_MADEBY >> 8)) +#define MZ_HOST_SYSTEM_MSDOS (0) +#define MZ_HOST_SYSTEM_UNIX (3) +#define MZ_HOST_SYSTEM_WINDOWS_NTFS (10) +#define MZ_HOST_SYSTEM_RISCOS (13) +#define MZ_HOST_SYSTEM_OSX_DARWIN (19) + +/* MZ_PKCRYPT */ +#define MZ_PKCRYPT_HEADER_SIZE (12) + +/* MZ_AES */ +#define MZ_AES_VERSION (1) +#define MZ_AES_ENCRYPTION_MODE_128 (0x01) +#define MZ_AES_ENCRYPTION_MODE_192 (0x02) +#define MZ_AES_ENCRYPTION_MODE_256 (0x03) +#define 
MZ_AES_KEY_LENGTH(MODE) (8 * (MODE & 3) + 8) +#define MZ_AES_KEY_LENGTH_MAX (32) +#define MZ_AES_BLOCK_SIZE (16) +#define MZ_AES_HEADER_SIZE(MODE) ((4 * (MODE & 3) + 4) + 2) +#define MZ_AES_FOOTER_SIZE (10) + +/* MZ_HASH */ +#define MZ_HASH_MD5 (10) +#define MZ_HASH_MD5_SIZE (16) +#define MZ_HASH_SHA1 (20) +#define MZ_HASH_SHA1_SIZE (20) +#define MZ_HASH_SHA256 (23) +#define MZ_HASH_SHA256_SIZE (32) +#define MZ_HASH_MAX_SIZE (256) + +/* MZ_ENCODING */ +#define MZ_ENCODING_CODEPAGE_437 (437) +#define MZ_ENCODING_CODEPAGE_932 (932) +#define MZ_ENCODING_CODEPAGE_936 (936) +#define MZ_ENCODING_CODEPAGE_950 (950) +#define MZ_ENCODING_UTF8 (65001) + +/* MZ_UTILITY */ +#define MZ_UNUSED(SYMBOL) ((void)SYMBOL) + +#ifndef MZ_CUSTOM_ALLOC +#define MZ_ALLOC(SIZE) (malloc((SIZE))) +#endif +#ifndef MZ_CUSTOM_FREE +#define MZ_FREE(PTR) (free(PTR)) +#endif + +#if defined(_WIN32) && defined(MZ_EXPORTS) +#define MZ_EXPORT __declspec(dllexport) +#else +#define MZ_EXPORT +#endif + +/***************************************************************************/ + +#include /* size_t, NULL, malloc */ +#include /* time_t, time() */ +#include /* memset, strncpy, strlen */ +#include + +#if defined(HAVE_STDINT_H) +# include +#elif defined(__has_include) +# if __has_include() +# include +# endif +#endif + +#ifndef INT8_MAX +typedef signed char int8_t; +#endif +#ifndef INT16_MAX +typedef short int16_t; +#endif +#ifndef INT32_MAX +typedef int int32_t; +#endif +#ifndef INT64_MAX +typedef long long int64_t; +#endif +#ifndef UINT8_MAX +typedef unsigned char uint8_t; +#endif +#ifndef UINT16_MAX +typedef unsigned short uint16_t; +#endif +#ifndef UINT32_MAX +typedef unsigned int uint32_t; +#endif +#ifndef UINT64_MAX +typedef unsigned long long uint64_t; +#endif + +#if defined(HAVE_INTTYPES_H) +# include +#elif defined(__has_include) +# if __has_include() +# include +# endif +#endif + +#ifndef PRId8 +# define PRId8 "hhd" +#endif +#ifndef PRIu8 +# define PRIu8 "hhu" +#endif +#ifndef PRIx8 +# define PRIx8 "hhx" +#endif +#ifndef PRId16 +# define PRId16 "hd" +#endif +#ifndef PRIu16 +# define PRIu16 "hu" +#endif +#ifndef PRIx16 +# define PRIx16 "hx" +#endif +#ifndef PRId32 +# define PRId32 "d" +#endif +#ifndef PRIu32 +# define PRIu32 "u" +#endif +#ifndef PRIx32 +# define PRIx32 "x" +#endif +#if ULONG_MAX == 0xfffffffful +# ifndef PRId64 +# define PRId64 "ld" +# endif +# ifndef PRIu64 +# define PRIu64 "lu" +# endif +# ifndef PRIx64 +# define PRIx64 "lx" +# endif +#else +# ifndef PRId64 +# define PRId64 "lld" +# endif +# ifndef PRIu64 +# define PRIu64 "llu" +# endif +# ifndef PRIx64 +# define PRIx64 "llx" +# endif +#endif + +#ifndef INT16_MAX +# define INT16_MAX 32767 +#endif +#ifndef INT32_MAX +# define INT32_MAX 2147483647L +#endif +#ifndef INT64_MAX +# define INT64_MAX 9223372036854775807LL +#endif +#ifndef UINT16_MAX +# define UINT16_MAX 65535U +#endif +#ifndef UINT32_MAX +# define UINT32_MAX 4294967295UL +#endif +#ifndef UINT64_MAX +# define UINT64_MAX 18446744073709551615ULL +#endif + +/***************************************************************************/ + +#endif diff --git a/Externals/minizip/mz_compat.c b/Externals/minizip/mz_compat.c new file mode 100644 index 000000000000..5a08046a7f7c --- /dev/null +++ b/Externals/minizip/mz_compat.c @@ -0,0 +1,1303 @@ +/* mz_compat.c -- Backwards compatible interface for older versions + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2010 Gilles Vollant + 
https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" +#include "mz_os.h" +#include "mz_strm.h" +#include "mz_strm_mem.h" +#include "mz_strm_os.h" +#include "mz_strm_zlib.h" +#include "mz_zip.h" + +#include /* SEEK */ + +#include "mz_compat.h" + +/***************************************************************************/ + +typedef struct mz_compat_s { + void *stream; + void *handle; + uint64_t entry_index; + int64_t entry_pos; + int64_t total_out; +} mz_compat; + +/***************************************************************************/ + +typedef struct mz_stream_ioapi_s { + mz_stream stream; + void *handle; + zlib_filefunc_def filefunc; + zlib_filefunc64_def filefunc64; +} mz_stream_ioapi; + +/***************************************************************************/ + +static int32_t mz_stream_ioapi_open(void *stream, const char *path, int32_t mode); +static int32_t mz_stream_ioapi_is_open(void *stream); +static int32_t mz_stream_ioapi_read(void *stream, void *buf, int32_t size); +static int32_t mz_stream_ioapi_write(void *stream, const void *buf, int32_t size); +static int64_t mz_stream_ioapi_tell(void *stream); +static int32_t mz_stream_ioapi_seek(void *stream, int64_t offset, int32_t origin); +static int32_t mz_stream_ioapi_close(void *stream); +static int32_t mz_stream_ioapi_error(void *stream); +static void *mz_stream_ioapi_create(void **stream); +static void mz_stream_ioapi_delete(void **stream); + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_ioapi_vtbl = { + mz_stream_ioapi_open, + mz_stream_ioapi_is_open, + mz_stream_ioapi_read, + mz_stream_ioapi_write, + mz_stream_ioapi_tell, + mz_stream_ioapi_seek, + mz_stream_ioapi_close, + mz_stream_ioapi_error, + mz_stream_ioapi_create, + mz_stream_ioapi_delete, + NULL, + NULL +}; + +/***************************************************************************/ + +static int32_t mz_stream_ioapi_open(void *stream, const char *path, int32_t mode) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + int32_t ioapi_mode = 0; + + if ((mode & MZ_OPEN_MODE_READWRITE) == MZ_OPEN_MODE_READ) + ioapi_mode = ZLIB_FILEFUNC_MODE_READ; + else if (mode & MZ_OPEN_MODE_APPEND) + ioapi_mode = ZLIB_FILEFUNC_MODE_EXISTING; + else if (mode & MZ_OPEN_MODE_CREATE) + ioapi_mode = ZLIB_FILEFUNC_MODE_CREATE; + else + return MZ_OPEN_ERROR; + + if (ioapi->filefunc64.zopen64_file != NULL) + ioapi->handle = ioapi->filefunc64.zopen64_file(ioapi->filefunc64.opaque, path, ioapi_mode); + else if (ioapi->filefunc.zopen_file != NULL) + ioapi->handle = ioapi->filefunc.zopen_file(ioapi->filefunc.opaque, path, ioapi_mode); + + if (ioapi->handle == NULL) + return MZ_PARAM_ERROR; + + return MZ_OK; +} + +static int32_t mz_stream_ioapi_is_open(void *stream) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + if (ioapi->handle == NULL) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +static int32_t mz_stream_ioapi_read(void *stream, void *buf, int32_t size) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + read_file_func zread = NULL; + void *opaque = NULL; + + if (mz_stream_ioapi_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (ioapi->filefunc64.zread_file != NULL) { + zread = ioapi->filefunc64.zread_file; + opaque = ioapi->filefunc64.opaque; + } else if (ioapi->filefunc.zread_file != NULL) { + zread = 
ioapi->filefunc.zread_file; + opaque = ioapi->filefunc.opaque; + } else + return MZ_PARAM_ERROR; + + return zread(opaque, ioapi->handle, buf, size); +} + +static int32_t mz_stream_ioapi_write(void *stream, const void *buf, int32_t size) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + write_file_func zwrite = NULL; + int32_t written = 0; + void *opaque = NULL; + + if (mz_stream_ioapi_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (ioapi->filefunc64.zwrite_file != NULL) { + zwrite = ioapi->filefunc64.zwrite_file; + opaque = ioapi->filefunc64.opaque; + } else if (ioapi->filefunc.zwrite_file != NULL) { + zwrite = ioapi->filefunc.zwrite_file; + opaque = ioapi->filefunc.opaque; + } else + return MZ_PARAM_ERROR; + + written = zwrite(opaque, ioapi->handle, buf, size); + return written; +} + +static int64_t mz_stream_ioapi_tell(void *stream) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + + if (mz_stream_ioapi_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (ioapi->filefunc64.ztell64_file != NULL) + return ioapi->filefunc64.ztell64_file(ioapi->filefunc64.opaque, ioapi->handle); + else if (ioapi->filefunc.ztell_file != NULL) + return ioapi->filefunc.ztell_file(ioapi->filefunc.opaque, ioapi->handle); + + return MZ_INTERNAL_ERROR; +} + +static int32_t mz_stream_ioapi_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + int32_t written = 0; + void *opaque = NULL; + + if (mz_stream_ioapi_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (ioapi->filefunc64.zseek64_file != NULL) { + if (ioapi->filefunc64.zseek64_file(ioapi->filefunc64.opaque, ioapi->handle, offset, origin) != 0) + return MZ_INTERNAL_ERROR; + } else if (ioapi->filefunc.zseek_file != NULL) { + if (ioapi->filefunc.zseek_file(ioapi->filefunc.opaque, ioapi->handle, (int32_t)offset, origin) != 0) + return MZ_INTERNAL_ERROR; + } else + return MZ_PARAM_ERROR; + + return MZ_OK; +} + +static int32_t mz_stream_ioapi_close(void *stream) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + close_file_func zclose = NULL; + void *opaque = NULL; + + if (mz_stream_ioapi_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (ioapi->filefunc.zclose_file != NULL) { + zclose = ioapi->filefunc.zclose_file; + opaque = ioapi->filefunc.opaque; + } else if (ioapi->filefunc64.zclose_file != NULL) { + zclose = ioapi->filefunc64.zclose_file; + opaque = ioapi->filefunc64.opaque; + } else + return MZ_PARAM_ERROR; + + if (zclose(opaque, ioapi->handle) != 0) + return MZ_CLOSE_ERROR; + ioapi->handle = NULL; + return MZ_OK; +} + +static int32_t mz_stream_ioapi_error(void *stream) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + testerror_file_func zerror = NULL; + void *opaque = NULL; + + if (mz_stream_ioapi_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (ioapi->filefunc.zerror_file != NULL) { + zerror = ioapi->filefunc.zerror_file; + opaque = ioapi->filefunc.opaque; + } else if (ioapi->filefunc64.zerror_file != NULL) { + zerror = ioapi->filefunc64.zerror_file; + opaque = ioapi->filefunc64.opaque; + } else + return MZ_PARAM_ERROR; + + return zerror(opaque, ioapi->handle); +} + +static int32_t mz_stream_ioapi_set_filefunc(void *stream, zlib_filefunc_def *filefunc) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi *)stream; + memcpy(&ioapi->filefunc, filefunc, sizeof(zlib_filefunc_def)); + return MZ_OK; +} + +static int32_t mz_stream_ioapi_set_filefunc64(void *stream, zlib_filefunc64_def *filefunc) { + mz_stream_ioapi *ioapi = (mz_stream_ioapi 
*)stream; + memcpy(&ioapi->filefunc64, filefunc, sizeof(zlib_filefunc64_def)); + return MZ_OK; +} + +static void *mz_stream_ioapi_create(void **stream) { + mz_stream_ioapi *ioapi = NULL; + + ioapi = (mz_stream_ioapi *)MZ_ALLOC(sizeof(mz_stream_ioapi)); + if (ioapi != NULL) { + memset(ioapi, 0, sizeof(mz_stream_ioapi)); + ioapi->stream.vtbl = &mz_stream_ioapi_vtbl; + } + if (stream != NULL) + *stream = ioapi; + + return ioapi; +} + +static void mz_stream_ioapi_delete(void **stream) { + mz_stream_ioapi *ioapi = NULL; + if (stream == NULL) + return; + ioapi = (mz_stream_ioapi *)*stream; + if (ioapi != NULL) + MZ_FREE(ioapi); + *stream = NULL; +} + +/***************************************************************************/ + +void fill_fopen_filefunc(zlib_filefunc_def *pzlib_filefunc_def) { + /* For 32-bit file support only, compile with MZ_FILE32_API */ + if (pzlib_filefunc_def != NULL) + memset(pzlib_filefunc_def, 0, sizeof(zlib_filefunc_def)); +} + +void fill_fopen64_filefunc(zlib_filefunc64_def *pzlib_filefunc_def) { + /* All mz_stream_os_* support large files if compilation supports it */ + if (pzlib_filefunc_def != NULL) + memset(pzlib_filefunc_def, 0, sizeof(zlib_filefunc64_def)); +} + +void fill_win32_filefunc(zlib_filefunc_def *pzlib_filefunc_def) { + /* Handled by mz_stream_os_win32 */ + if (pzlib_filefunc_def != NULL) + memset(pzlib_filefunc_def, 0, sizeof(zlib_filefunc_def)); +} + +void fill_win32_filefunc64(zlib_filefunc64_def *pzlib_filefunc_def) { + /* Automatically supported in mz_stream_os_win32 */ + if (pzlib_filefunc_def != NULL) + memset(pzlib_filefunc_def, 0, sizeof(zlib_filefunc64_def)); +} + +void fill_win32_filefunc64A(zlib_filefunc64_def *pzlib_filefunc_def) { + /* Automatically supported in mz_stream_os_win32 */ + if (pzlib_filefunc_def != NULL) + memset(pzlib_filefunc_def, 0, sizeof(zlib_filefunc64_def)); +} + +/* NOTE: fill_win32_filefunc64W is no longer necessary since wide-character + support is automatically handled by the underlying os stream. Do not + pass wide-characters to zipOpen or unzOpen. 
*/ + +void fill_memory_filefunc(zlib_filefunc_def *pzlib_filefunc_def) { + /* Use opaque to indicate which stream interface to create */ + if (pzlib_filefunc_def != NULL) { + memset(pzlib_filefunc_def, 0, sizeof(zlib_filefunc_def)); + pzlib_filefunc_def->opaque = mz_stream_mem_get_interface(); + } +} + +/***************************************************************************/ + +static int32_t zipConvertAppendToStreamMode(int append) { + int32_t mode = MZ_OPEN_MODE_WRITE; + switch (append) { + case APPEND_STATUS_CREATE: + mode |= MZ_OPEN_MODE_CREATE; + break; + case APPEND_STATUS_CREATEAFTER: + mode |= MZ_OPEN_MODE_CREATE | MZ_OPEN_MODE_APPEND; + break; + case APPEND_STATUS_ADDINZIP: + mode |= MZ_OPEN_MODE_READ | MZ_OPEN_MODE_APPEND; + break; + } + return mode; +} + +zipFile zipOpen(const char *path, int append) { + return zipOpen2(path, append, NULL, NULL); +} + +zipFile zipOpen64(const void *path, int append) { + return zipOpen2(path, append, NULL, NULL); +} + +zipFile zipOpen2(const char *path, int append, const char **globalcomment, + zlib_filefunc_def *pzlib_filefunc_def) { + zipFile zip = NULL; + int32_t mode = zipConvertAppendToStreamMode(append); + void *stream = NULL; + + if (pzlib_filefunc_def) { + if (pzlib_filefunc_def->zopen_file != NULL) { + if (mz_stream_ioapi_create(&stream) == NULL) + return NULL; + mz_stream_ioapi_set_filefunc(stream, pzlib_filefunc_def); + } else if (pzlib_filefunc_def->opaque != NULL) { + if (mz_stream_create(&stream, (mz_stream_vtbl *)pzlib_filefunc_def->opaque) == NULL) + return NULL; + } + } + + if (stream == NULL) { + if (mz_stream_os_create(&stream) == NULL) + return NULL; + } + + if (mz_stream_open(stream, path, mode) != MZ_OK) { + mz_stream_delete(&stream); + return NULL; + } + + zip = zipOpen_MZ(stream, append, globalcomment); + + if (zip == NULL) { + mz_stream_delete(&stream); + return NULL; + } + + return zip; +} + +zipFile zipOpen2_64(const void *path, int append, const char **globalcomment, + zlib_filefunc64_def *pzlib_filefunc_def) { + zipFile zip = NULL; + int32_t mode = zipConvertAppendToStreamMode(append); + void *stream = NULL; + + if (pzlib_filefunc_def) { + if (pzlib_filefunc_def->zopen64_file != NULL) { + if (mz_stream_ioapi_create(&stream) == NULL) + return NULL; + mz_stream_ioapi_set_filefunc64(stream, pzlib_filefunc_def); + } else if (pzlib_filefunc_def->opaque != NULL) { + if (mz_stream_create(&stream, (mz_stream_vtbl *)pzlib_filefunc_def->opaque) == NULL) + return NULL; + } + } + + if (stream == NULL) { + if (mz_stream_os_create(&stream) == NULL) + return NULL; + } + + if (mz_stream_open(stream, path, mode) != MZ_OK) { + mz_stream_delete(&stream); + return NULL; + } + + zip = zipOpen_MZ(stream, append, globalcomment); + + if (zip == NULL) { + mz_stream_delete(&stream); + return NULL; + } + + return zip; +} + +zipFile zipOpen_MZ(void *stream, int append, const char **globalcomment) { + mz_compat *compat = NULL; + int32_t err = MZ_OK; + int32_t mode = zipConvertAppendToStreamMode(append); + void *handle = NULL; + + mz_zip_create(&handle); + err = mz_zip_open(handle, stream, mode); + + if (err != MZ_OK) { + mz_zip_delete(&handle); + return NULL; + } + + if (globalcomment != NULL) + mz_zip_get_comment(handle, globalcomment); + + compat = (mz_compat *)MZ_ALLOC(sizeof(mz_compat)); + if (compat != NULL) { + compat->handle = handle; + compat->stream = stream; + } else { + mz_zip_delete(&handle); + } + + return (zipFile)compat; +} + +void* zipGetHandle_MZ(zipFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) 
+ return NULL; + return compat->handle; +} + +void* zipGetStream_MZ(zipFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return NULL; + return (void *)compat->stream; +} + +int zipOpenNewFileInZip5(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting, unsigned long version_madeby, unsigned long flag_base, int zip64) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file file_info; + + MZ_UNUSED(strategy); + MZ_UNUSED(memLevel); + MZ_UNUSED(windowBits); + MZ_UNUSED(size_extrafield_local); + MZ_UNUSED(extrafield_local); + MZ_UNUSED(crc_for_crypting); + + if (compat == NULL) + return ZIP_PARAMERROR; + + memset(&file_info, 0, sizeof(file_info)); + + if (zipfi != NULL) { + uint64_t dos_date = 0; + + if (zipfi->mz_dos_date != 0) + dos_date = zipfi->mz_dos_date; + else + dos_date = mz_zip_tm_to_dosdate(&zipfi->tmz_date); + + file_info.modified_date = mz_zip_dosdate_to_time_t(dos_date); + file_info.external_fa = zipfi->external_fa; + file_info.internal_fa = zipfi->internal_fa; + } + + if (filename == NULL) + filename = "-"; + + file_info.compression_method = (uint16_t)compression_method; + file_info.filename = filename; + /* file_info.extrafield_local = extrafield_local; */ + /* file_info.extrafield_local_size = size_extrafield_local; */ + file_info.extrafield = extrafield_global; + file_info.extrafield_size = size_extrafield_global; + file_info.version_madeby = (uint16_t)version_madeby; + file_info.comment = comment; + if (file_info.comment != NULL) + file_info.comment_size = (uint16_t)strlen(file_info.comment); + file_info.flag = (uint16_t)flag_base; + if (zip64) + file_info.zip64 = MZ_ZIP64_FORCE; + else + file_info.zip64 = MZ_ZIP64_DISABLE; +#ifdef HAVE_WZAES + if ((password != NULL) || (raw && (file_info.flag & MZ_ZIP_FLAG_ENCRYPTED))) + file_info.aes_version = MZ_AES_VERSION; +#endif + + return mz_zip_entry_write_open(compat->handle, &file_info, (int16_t)level, (uint8_t)raw, password); +} + +int zipOpenNewFileInZip4_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting, unsigned long version_madeby, unsigned long flag_base, int zip64) { + return zipOpenNewFileInZip5(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, raw, windowBits, + memLevel, strategy, password, crc_for_crypting, version_madeby, flag_base, zip64); +} + +int zipOpenNewFileInZip4(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting, unsigned long version_madeby, unsigned long flag_base) { + return zipOpenNewFileInZip4_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + 
extrafield_global, size_extrafield_global, comment, compression_method, level, raw, windowBits, + memLevel, strategy, password, crc_for_crypting, version_madeby, flag_base, 0); +} + +int zipOpenNewFileInZip3(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting) { + return zipOpenNewFileInZip3_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, raw, windowBits, + memLevel, strategy, password, crc_for_crypting, 0); +} + +int zipOpenNewFileInZip3_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + uint32_t crc_for_crypting, int zip64) { + return zipOpenNewFileInZip4_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, raw, windowBits, + memLevel, strategy, password, crc_for_crypting, MZ_VERSION_MADEBY, 0, zip64); +} + +int zipOpenNewFileInZip2(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw) { + return zipOpenNewFileInZip3_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, raw, + 0, 0, 0, NULL, 0, 0); +} + +int zipOpenNewFileInZip2_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int zip64) { + return zipOpenNewFileInZip3_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, raw, 0, + 0, 0, NULL, 0, zip64); +} + +int zipOpenNewFileInZip(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level) { + return zipOpenNewFileInZip_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, 0); +} + +int zipOpenNewFileInZip_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int zip64) { + return zipOpenNewFileInZip2_64(file, filename, zipfi, extrafield_local, size_extrafield_local, + extrafield_global, size_extrafield_global, comment, compression_method, level, 0, zip64); +} + +int zipWriteInFileInZip(zipFile file, const void *buf, uint32_t len) { + mz_compat 
*compat = (mz_compat *)file; + int32_t written = 0; + if (compat == NULL || len >= INT32_MAX) + return ZIP_PARAMERROR; + written = mz_zip_entry_write(compat->handle, buf, (int32_t)len); + if ((written < 0) || ((uint32_t)written != len)) + return ZIP_ERRNO; + return ZIP_OK; +} + +int zipCloseFileInZipRaw(zipFile file, unsigned long uncompressed_size, unsigned long crc32) { + return zipCloseFileInZipRaw64(file, uncompressed_size, crc32); +} + +int zipCloseFileInZipRaw64(zipFile file, uint64_t uncompressed_size, unsigned long crc32) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return ZIP_PARAMERROR; + return mz_zip_entry_close_raw(compat->handle, (int64_t)uncompressed_size, crc32); +} + +int zipCloseFileInZip(zipFile file) { + return zipCloseFileInZip64(file); +} + +int zipCloseFileInZip64(zipFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return ZIP_PARAMERROR; + return mz_zip_entry_close(compat->handle); +} + +int zipClose(zipFile file, const char *global_comment) { + return zipClose_64(file, global_comment); +} + +int zipClose_64(zipFile file, const char *global_comment) { + return zipClose2_64(file, global_comment, MZ_VERSION_MADEBY); +} + +int zipClose2_64(zipFile file, const char *global_comment, uint16_t version_madeby) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + + if (compat->handle != NULL) + err = zipClose2_MZ(file, global_comment, version_madeby); + + if (compat->stream != NULL) { + mz_stream_close(compat->stream); + mz_stream_delete(&compat->stream); + } + + MZ_FREE(compat); + + return err; +} + +/* Only closes the zip handle, does not close the stream */ +int zipClose_MZ(zipFile file, const char *global_comment) { + return zipClose2_MZ(file, global_comment, MZ_VERSION_MADEBY); +} + +/* Only closes the zip handle, does not close the stream */ +int zipClose2_MZ(zipFile file, const char *global_comment, uint16_t version_madeby) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + + if (compat == NULL) + return ZIP_PARAMERROR; + if (compat->handle == NULL) + return err; + + if (global_comment != NULL) + mz_zip_set_comment(compat->handle, global_comment); + + mz_zip_set_version_madeby(compat->handle, version_madeby); + err = mz_zip_close(compat->handle); + mz_zip_delete(&compat->handle); + + return err; +} + +/***************************************************************************/ + +unzFile unzOpen(const char *path) { + return unzOpen64(path); +} + +unzFile unzOpen64(const void *path) { + return unzOpen2(path, NULL); +} + +unzFile unzOpen2(const char *path, zlib_filefunc_def *pzlib_filefunc_def) { + unzFile unz = NULL; + void *stream = NULL; + + if (pzlib_filefunc_def) { + if (pzlib_filefunc_def->zopen_file != NULL) { + if (mz_stream_ioapi_create(&stream) == NULL) + return NULL; + mz_stream_ioapi_set_filefunc(stream, pzlib_filefunc_def); + } else if (pzlib_filefunc_def->opaque != NULL) { + if (mz_stream_create(&stream, (mz_stream_vtbl *)pzlib_filefunc_def->opaque) == NULL) + return NULL; + } + } + + if (stream == NULL) { + if (mz_stream_os_create(&stream) == NULL) + return NULL; + } + + if (mz_stream_open(stream, path, MZ_OPEN_MODE_READ) != MZ_OK) { + mz_stream_delete(&stream); + return NULL; + } + + unz = unzOpen_MZ(stream); + if (unz == NULL) { + mz_stream_close(stream); + mz_stream_delete(&stream); + return NULL; + } + return unz; +} + +unzFile unzOpen2_64(const void *path, zlib_filefunc64_def *pzlib_filefunc_def) { + unzFile unz = NULL; + void *stream = NULL; + + if 
(pzlib_filefunc_def) { + if (pzlib_filefunc_def->zopen64_file != NULL) { + if (mz_stream_ioapi_create(&stream) == NULL) + return NULL; + mz_stream_ioapi_set_filefunc64(stream, pzlib_filefunc_def); + } else if (pzlib_filefunc_def->opaque != NULL) { + if (mz_stream_create(&stream, (mz_stream_vtbl *)pzlib_filefunc_def->opaque) == NULL) + return NULL; + } + } + + if (stream == NULL) { + if (mz_stream_os_create(&stream) == NULL) + return NULL; + } + + if (mz_stream_open(stream, path, MZ_OPEN_MODE_READ) != MZ_OK) { + mz_stream_delete(&stream); + return NULL; + } + + unz = unzOpen_MZ(stream); + if (unz == NULL) { + mz_stream_close(stream); + mz_stream_delete(&stream); + return NULL; + } + return unz; +} + +void* unzGetHandle_MZ(unzFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return NULL; + return compat->handle; +} + +void* unzGetStream_MZ(unzFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return NULL; + return compat->stream; +} + +unzFile unzOpen_MZ(void *stream) { + mz_compat *compat = NULL; + int32_t err = MZ_OK; + void *handle = NULL; + + mz_zip_create(&handle); + err = mz_zip_open(handle, stream, MZ_OPEN_MODE_READ); + + if (err != MZ_OK) { + mz_zip_delete(&handle); + return NULL; + } + + compat = (mz_compat *)MZ_ALLOC(sizeof(mz_compat)); + if (compat != NULL) { + compat->handle = handle; + compat->stream = stream; + + mz_zip_goto_first_entry(compat->handle); + } else { + mz_zip_delete(&handle); + } + + return (unzFile)compat; +} + +int unzClose(unzFile file) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + + if (compat == NULL) + return UNZ_PARAMERROR; + + if (compat->handle != NULL) + err = unzClose_MZ(file); + + if (compat->stream != NULL) { + mz_stream_close(compat->stream); + mz_stream_delete(&compat->stream); + } + + MZ_FREE(compat); + + return err; +} + +/* Only closes the zip handle, does not close the stream */ +int unzClose_MZ(unzFile file) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + + if (compat == NULL) + return UNZ_PARAMERROR; + + err = mz_zip_close(compat->handle); + mz_zip_delete(&compat->handle); + + return err; +} + +int unzGetGlobalInfo(unzFile file, unz_global_info* pglobal_info32) { + mz_compat *compat = (mz_compat *)file; + unz_global_info64 global_info64; + int32_t err = MZ_OK; + + memset(pglobal_info32, 0, sizeof(unz_global_info)); + if (compat == NULL) + return UNZ_PARAMERROR; + + err = unzGetGlobalInfo64(file, &global_info64); + if (err == MZ_OK) { + pglobal_info32->number_entry = (uint32_t)global_info64.number_entry; + pglobal_info32->size_comment = global_info64.size_comment; + pglobal_info32->number_disk_with_CD = global_info64.number_disk_with_CD; + } + return err; +} + +int unzGetGlobalInfo64(unzFile file, unz_global_info64 *pglobal_info) { + mz_compat *compat = (mz_compat *)file; + const char *comment_ptr = NULL; + int32_t err = MZ_OK; + + memset(pglobal_info, 0, sizeof(unz_global_info64)); + if (compat == NULL) + return UNZ_PARAMERROR; + err = mz_zip_get_comment(compat->handle, &comment_ptr); + if (err == MZ_OK) + pglobal_info->size_comment = (uint16_t)strlen(comment_ptr); + if ((err == MZ_OK) || (err == MZ_EXIST_ERROR)) + err = mz_zip_get_number_entry(compat->handle, &pglobal_info->number_entry); + if (err == MZ_OK) + err = mz_zip_get_disk_number_with_cd(compat->handle, &pglobal_info->number_disk_with_CD); + return err; +} + +int unzGetGlobalComment(unzFile file, char *comment, unsigned long comment_size) { + mz_compat *compat = (mz_compat *)file; + const 
char *comment_ptr = NULL; + int32_t err = MZ_OK; + + if (comment == NULL || comment_size == 0) + return UNZ_PARAMERROR; + err = mz_zip_get_comment(compat->handle, &comment_ptr); + if (err == MZ_OK) { + strncpy(comment, comment_ptr, comment_size - 1); + comment[comment_size - 1] = 0; + } + return err; +} + +int unzOpenCurrentFile3(unzFile file, int *method, int *level, int raw, const char *password) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + int32_t err = MZ_OK; + void *stream = NULL; + + if (compat == NULL) + return UNZ_PARAMERROR; + if (method != NULL) + *method = 0; + if (level != NULL) + *level = 0; + + compat->total_out = 0; + err = mz_zip_entry_read_open(compat->handle, (uint8_t)raw, password); + if (err == MZ_OK) + err = mz_zip_entry_get_info(compat->handle, &file_info); + if (err == MZ_OK) { + if (method != NULL) { + *method = file_info->compression_method; + } + + if (level != NULL) { + *level = 6; + switch (file_info->flag & 0x06) { + case MZ_ZIP_FLAG_DEFLATE_SUPER_FAST: + *level = 1; + break; + case MZ_ZIP_FLAG_DEFLATE_FAST: + *level = 2; + break; + case MZ_ZIP_FLAG_DEFLATE_MAX: + *level = 9; + break; + } + } + } + if (err == MZ_OK) + err = mz_zip_get_stream(compat->handle, &stream); + if (err == MZ_OK) + compat->entry_pos = mz_stream_tell(stream); + return err; +} + +int unzOpenCurrentFile(unzFile file) { + return unzOpenCurrentFile3(file, NULL, NULL, 0, NULL); +} + +int unzOpenCurrentFilePassword(unzFile file, const char *password) { + return unzOpenCurrentFile3(file, NULL, NULL, 0, password); +} + +int unzOpenCurrentFile2(unzFile file, int *method, int *level, int raw) { + return unzOpenCurrentFile3(file, method, level, raw, NULL); +} + +int unzReadCurrentFile(unzFile file, void *buf, uint32_t len) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + if (compat == NULL || len >= INT32_MAX) + return UNZ_PARAMERROR; + err = mz_zip_entry_read(compat->handle, buf, (int32_t)len); + if (err > 0) + compat->total_out += (uint32_t)err; + return err; +} + +int unzCloseCurrentFile(unzFile file) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + if (compat == NULL) + return UNZ_PARAMERROR; + err = mz_zip_entry_close(compat->handle); + return err; +} + +int unzGetCurrentFileInfo(unzFile file, unz_file_info *pfile_info, char *filename, + unsigned long filename_size, void *extrafield, unsigned long extrafield_size, char *comment, + unsigned long comment_size) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + uint16_t bytes_to_copy = 0; + int32_t err = MZ_OK; + + if (compat == NULL) + return UNZ_PARAMERROR; + + err = mz_zip_entry_get_info(compat->handle, &file_info); + if (err != MZ_OK) + return err; + + if (pfile_info != NULL) { + pfile_info->version = file_info->version_madeby; + pfile_info->version_needed = file_info->version_needed; + pfile_info->flag = file_info->flag; + pfile_info->compression_method = file_info->compression_method; + pfile_info->mz_dos_date = mz_zip_time_t_to_dos_date(file_info->modified_date); + mz_zip_time_t_to_tm(file_info->modified_date, &pfile_info->tmu_date); + pfile_info->tmu_date.tm_year += 1900; + pfile_info->crc = file_info->crc; + + pfile_info->size_filename = file_info->filename_size; + pfile_info->size_file_extra = file_info->extrafield_size; + pfile_info->size_file_comment = file_info->comment_size; + + pfile_info->disk_num_start = (uint16_t)file_info->disk_number; + pfile_info->internal_fa = file_info->internal_fa; + pfile_info->external_fa = 
file_info->external_fa; + + pfile_info->compressed_size = (uint32_t)file_info->compressed_size; + pfile_info->uncompressed_size = (uint32_t)file_info->uncompressed_size; + } + if (filename_size > 0 && filename != NULL && file_info->filename != NULL) { + bytes_to_copy = (uint16_t)filename_size; + if (bytes_to_copy > file_info->filename_size) + bytes_to_copy = file_info->filename_size; + memcpy(filename, file_info->filename, bytes_to_copy); + if (bytes_to_copy < filename_size) + filename[bytes_to_copy] = 0; + } + if (extrafield_size > 0 && extrafield != NULL) { + bytes_to_copy = (uint16_t)extrafield_size; + if (bytes_to_copy > file_info->extrafield_size) + bytes_to_copy = file_info->extrafield_size; + memcpy(extrafield, file_info->extrafield, bytes_to_copy); + } + if (comment_size > 0 && comment != NULL && file_info->comment != NULL) { + bytes_to_copy = (uint16_t)comment_size; + if (bytes_to_copy > file_info->comment_size) + bytes_to_copy = file_info->comment_size; + memcpy(comment, file_info->comment, bytes_to_copy); + if (bytes_to_copy < comment_size) + comment[bytes_to_copy] = 0; + } + return err; +} + +int unzGetCurrentFileInfo64(unzFile file, unz_file_info64 * pfile_info, char *filename, + unsigned long filename_size, void *extrafield, unsigned long extrafield_size, char *comment, + unsigned long comment_size) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + uint16_t bytes_to_copy = 0; + int32_t err = MZ_OK; + + if (compat == NULL) + return UNZ_PARAMERROR; + + err = mz_zip_entry_get_info(compat->handle, &file_info); + if (err != MZ_OK) + return err; + + if (pfile_info != NULL) { + pfile_info->version = file_info->version_madeby; + pfile_info->version_needed = file_info->version_needed; + pfile_info->flag = file_info->flag; + pfile_info->compression_method = file_info->compression_method; + pfile_info->mz_dos_date = mz_zip_time_t_to_dos_date(file_info->modified_date); + mz_zip_time_t_to_tm(file_info->modified_date, &pfile_info->tmu_date); + pfile_info->tmu_date.tm_year += 1900; + pfile_info->crc = file_info->crc; + + pfile_info->size_filename = file_info->filename_size; + pfile_info->size_file_extra = file_info->extrafield_size; + pfile_info->size_file_comment = file_info->comment_size; + + pfile_info->disk_num_start = file_info->disk_number; + pfile_info->internal_fa = file_info->internal_fa; + pfile_info->external_fa = file_info->external_fa; + + pfile_info->compressed_size = (uint64_t)file_info->compressed_size; + pfile_info->uncompressed_size = (uint64_t)file_info->uncompressed_size; + } + if (filename_size > 0 && filename != NULL && file_info->filename != NULL) { + bytes_to_copy = (uint16_t)filename_size; + if (bytes_to_copy > file_info->filename_size) + bytes_to_copy = file_info->filename_size; + memcpy(filename, file_info->filename, bytes_to_copy); + if (bytes_to_copy < filename_size) + filename[bytes_to_copy] = 0; + } + if (extrafield_size > 0 && extrafield != NULL) { + bytes_to_copy = (uint16_t)extrafield_size; + if (bytes_to_copy > file_info->extrafield_size) + bytes_to_copy = file_info->extrafield_size; + memcpy(extrafield, file_info->extrafield, bytes_to_copy); + } + if (comment_size > 0 && comment != NULL && file_info->comment != NULL) { + bytes_to_copy = (uint16_t)comment_size; + if (bytes_to_copy > file_info->comment_size) + bytes_to_copy = file_info->comment_size; + memcpy(comment, file_info->comment, bytes_to_copy); + if (bytes_to_copy < comment_size) + comment[bytes_to_copy] = 0; + } + return err; +} + +int unzGoToFirstFile(unzFile file) { + 
mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return UNZ_PARAMERROR; + compat->entry_index = 0; + return mz_zip_goto_first_entry(compat->handle); +} + +int unzGoToNextFile(unzFile file) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + if (compat == NULL) + return UNZ_PARAMERROR; + err = mz_zip_goto_next_entry(compat->handle); + if (err != MZ_END_OF_LIST) + compat->entry_index += 1; + return err; +} + +int unzLocateFile(unzFile file, const char *filename, unzFileNameComparer filename_compare_func) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + uint64_t preserve_index = 0; + int32_t err = MZ_OK; + int32_t result = 0; + + if (compat == NULL) + return UNZ_PARAMERROR; + + preserve_index = compat->entry_index; + + err = mz_zip_goto_first_entry(compat->handle); + while (err == MZ_OK) { + err = mz_zip_entry_get_info(compat->handle, &file_info); + if (err != MZ_OK) + break; + + if ((intptr_t)filename_compare_func > 2) { + result = filename_compare_func(file, filename, file_info->filename); + } else { + int32_t case_sensitive = (int32_t)(intptr_t)filename_compare_func; + result = mz_path_compare_wc(filename, file_info->filename, !case_sensitive); + } + + if (result == 0) + return MZ_OK; + + err = mz_zip_goto_next_entry(compat->handle); + } + + compat->entry_index = preserve_index; + return err; +} + +/***************************************************************************/ + +int unzGetFilePos(unzFile file, unz_file_pos *file_pos) { + unz64_file_pos file_pos64; + int32_t err = 0; + + err = unzGetFilePos64(file, &file_pos64); + if (err < 0) + return err; + + file_pos->pos_in_zip_directory = (uint32_t)file_pos64.pos_in_zip_directory; + file_pos->num_of_file = (uint32_t)file_pos64.num_of_file; + return err; +} + +int unzGoToFilePos(unzFile file, unz_file_pos *file_pos) { + mz_compat *compat = (mz_compat *)file; + unz64_file_pos file_pos64; + + if (compat == NULL || file_pos == NULL) + return UNZ_PARAMERROR; + + file_pos64.pos_in_zip_directory = file_pos->pos_in_zip_directory; + file_pos64.num_of_file = file_pos->num_of_file; + + return unzGoToFilePos64(file, &file_pos64); +} + +int unzGetFilePos64(unzFile file, unz64_file_pos *file_pos) { + mz_compat *compat = (mz_compat *)file; + int64_t offset = 0; + + if (compat == NULL || file_pos == NULL) + return UNZ_PARAMERROR; + + offset = unzGetOffset64(file); + if (offset < 0) + return (int)offset; + + file_pos->pos_in_zip_directory = offset; + file_pos->num_of_file = compat->entry_index; + return UNZ_OK; +} + +int unzGoToFilePos64(unzFile file, const unz64_file_pos *file_pos) { + mz_compat *compat = (mz_compat *)file; + int32_t err = MZ_OK; + + if (compat == NULL || file_pos == NULL) + return UNZ_PARAMERROR; + + err = mz_zip_goto_entry(compat->handle, file_pos->pos_in_zip_directory); + if (err == MZ_OK) + compat->entry_index = file_pos->num_of_file; + return err; +} + +unsigned long unzGetOffset(unzFile file) { + return (uint32_t)unzGetOffset64(file); +} + +int64_t unzGetOffset64(unzFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return UNZ_PARAMERROR; + return mz_zip_get_entry(compat->handle); +} + +int unzSetOffset(unzFile file, unsigned long pos) { + return unzSetOffset64(file, pos); +} + +int unzSetOffset64(unzFile file, int64_t pos) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return UNZ_PARAMERROR; + return (int)mz_zip_goto_entry(compat->handle, pos); +} + +int unzGetLocalExtrafield(unzFile file, void *buf, unsigned int len) { + 
mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + int32_t err = MZ_OK; + int32_t bytes_to_copy = 0; + + if (compat == NULL || buf == NULL || len >= INT32_MAX) + return UNZ_PARAMERROR; + + err = mz_zip_entry_get_local_info(compat->handle, &file_info); + if (err != MZ_OK) + return err; + + bytes_to_copy = (int32_t)len; + if (bytes_to_copy > file_info->extrafield_size) + bytes_to_copy = file_info->extrafield_size; + + memcpy(buf, file_info->extrafield, bytes_to_copy); + return MZ_OK; +} + +int32_t unzTell(unzFile file) { + return unztell(file); +} + +int32_t unztell(unzFile file) { + return (int32_t)unztell64(file); +} + +uint64_t unzTell64(unzFile file) { + return unztell64(file); +} + +uint64_t unztell64(unzFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return UNZ_PARAMERROR; + return compat->total_out; +} + +int unzSeek(unzFile file, int32_t offset, int origin) { + return unzSeek64(file, offset, origin); +} + +int unzSeek64(unzFile file, int64_t offset, int origin) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + int64_t position = 0; + int32_t err = MZ_OK; + void *stream = NULL; + + if (compat == NULL) + return UNZ_PARAMERROR; + err = mz_zip_entry_get_info(compat->handle, &file_info); + if (err != MZ_OK) + return err; + if (file_info->compression_method != MZ_COMPRESS_METHOD_STORE) + return UNZ_ERRNO; + + if (origin == SEEK_SET) + position = offset; + else if (origin == SEEK_CUR) + position = compat->total_out + offset; + else if (origin == SEEK_END) + position = (int64_t)file_info->compressed_size + offset; + else + return UNZ_PARAMERROR; + + if (position > (int64_t)file_info->compressed_size) + return UNZ_PARAMERROR; + + err = mz_zip_get_stream(compat->handle, &stream); + if (err == MZ_OK) + err = mz_stream_seek(stream, compat->entry_pos + position, MZ_SEEK_SET); + if (err == MZ_OK) + compat->total_out = position; + return err; +} + +int unzEndOfFile(unzFile file) { + return unzeof(file); +} + +int unzeof(unzFile file) { + mz_compat *compat = (mz_compat *)file; + mz_zip_file *file_info = NULL; + int32_t err = MZ_OK; + + if (compat == NULL) + return UNZ_PARAMERROR; + err = mz_zip_entry_get_info(compat->handle, &file_info); + if (err != MZ_OK) + return err; + if (compat->total_out == (int64_t)file_info->uncompressed_size) + return 1; + return 0; +} + +void* unzGetStream(unzFile file) { + mz_compat *compat = (mz_compat *)file; + if (compat == NULL) + return NULL; + return (void *)compat->stream; +} + +/***************************************************************************/ diff --git a/Externals/minizip/mz_compat.h b/Externals/minizip/mz_compat.h new file mode 100644 index 000000000000..808cc74e1f08 --- /dev/null +++ b/Externals/minizip/mz_compat.h @@ -0,0 +1,396 @@ +/* mz_compat.h -- Backwards compatible interface for older versions + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2010 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_COMPAT_H +#define MZ_COMPAT_H + +#include "mz.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +#if defined(HAVE_ZLIB) && defined(MAX_MEM_LEVEL) +#ifndef DEF_MEM_LEVEL +# if MAX_MEM_LEVEL >= 8 +# define DEF_MEM_LEVEL 8 +# else +# define DEF_MEM_LEVEL MAX_MEM_LEVEL +# endif +#endif +#endif +#ifndef MAX_WBITS +#define MAX_WBITS (15) +#endif +#ifndef DEF_MEM_LEVEL +#define DEF_MEM_LEVEL (8) +#endif + +#ifndef ZEXPORT +# define ZEXPORT MZ_EXPORT +#endif + +/***************************************************************************/ + +#if defined(STRICTZIP) || defined(STRICTZIPUNZIP) +/* like the STRICT of WIN32, we define a pointer that cannot be converted + from (void*) without cast */ +typedef struct TagzipFile__ { int unused; } zip_file__; +typedef zip_file__ *zipFile; +#else +typedef void *zipFile; +#endif + +/***************************************************************************/ + +typedef uint64_t ZPOS64_T; + +#ifndef ZCALLBACK +#define ZCALLBACK +#endif + +typedef void* (ZCALLBACK *open_file_func) (void *opaque, const char *filename, int mode); +typedef void* (ZCALLBACK *open64_file_func) (void *opaque, const void *filename, int mode); +typedef unsigned long (ZCALLBACK *read_file_func) (void *opaque, void *stream, void* buf, unsigned long size); +typedef unsigned long (ZCALLBACK *write_file_func) (void *opaque, void *stream, const void* buf, + unsigned long size); +typedef int (ZCALLBACK *close_file_func) (void *opaque, void *stream); +typedef int (ZCALLBACK *testerror_file_func)(void *opaque, void *stream); +typedef long (ZCALLBACK *tell_file_func) (void *opaque, void *stream); +typedef ZPOS64_T (ZCALLBACK *tell64_file_func) (void *opaque, void *stream); +typedef long (ZCALLBACK *seek_file_func) (void *opaque, void *stream, unsigned long offset, int origin); +typedef long (ZCALLBACK *seek64_file_func) (void *opaque, void *stream, ZPOS64_T offset, int origin); + +typedef struct zlib_filefunc_def_s +{ + open_file_func zopen_file; + read_file_func zread_file; + write_file_func zwrite_file; + tell_file_func ztell_file; + seek_file_func zseek_file; + close_file_func zclose_file; + testerror_file_func zerror_file; + void* opaque; +} zlib_filefunc_def; + +typedef struct zlib_filefunc64_def_s +{ + open64_file_func zopen64_file; + read_file_func zread_file; + write_file_func zwrite_file; + tell64_file_func ztell64_file; + seek64_file_func zseek64_file; + close_file_func zclose_file; + testerror_file_func zerror_file; + void* opaque; +} zlib_filefunc64_def; + +/***************************************************************************/ + +#define ZLIB_FILEFUNC_SEEK_SET (0) +#define ZLIB_FILEFUNC_SEEK_CUR (1) +#define ZLIB_FILEFUNC_SEEK_END (2) + +#define ZLIB_FILEFUNC_MODE_READ (1) +#define ZLIB_FILEFUNC_MODE_WRITE (2) +#define ZLIB_FILEFUNC_MODE_READWRITEFILTER (3) + +#define ZLIB_FILEFUNC_MODE_EXISTING (4) +#define ZLIB_FILEFUNC_MODE_CREATE (8) + +/***************************************************************************/ + +ZEXPORT void fill_fopen_filefunc(zlib_filefunc_def *pzlib_filefunc_def); +ZEXPORT void fill_fopen64_filefunc(zlib_filefunc64_def *pzlib_filefunc_def); +ZEXPORT void fill_win32_filefunc(zlib_filefunc_def *pzlib_filefunc_def); +ZEXPORT void fill_win32_filefunc64(zlib_filefunc64_def *pzlib_filefunc_def); +ZEXPORT void fill_win32_filefunc64A(zlib_filefunc64_def *pzlib_filefunc_def); +ZEXPORT void fill_memory_filefunc(zlib_filefunc_def *pzlib_filefunc_def); + 
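As an aside for reviewers (illustrative only, not part of the patch): the fill_* stubs implemented earlier in mz_compat.c simply zero the callback struct, so a legacy caller that still passes such a struct to unzOpen2_64 ends up on the built-in mz_stream_os backend. A minimal sketch of such a caller follows; dump_first_entry() and the archive path are hypothetical placeholders, and only functions declared in this compat header are used.

/* Illustrative sketch -- not part of the vendored sources. A zeroed
   zlib_filefunc64_def (or a NULL pointer) makes unzOpen2_64 fall back to
   the built-in OS stream, which is exactly what the fill_* stubs arrange. */
#include <stdio.h>
#include "mz_compat.h"

static int dump_first_entry(const char *path) {
    zlib_filefunc64_def filefunc64;
    unzFile unz = NULL;
    char buf[4096];
    int bytes = 0;

    fill_fopen64_filefunc(&filefunc64);      /* zeroes the callback struct */
    unz = unzOpen2_64(path, &filefunc64);    /* no callbacks set -> mz_stream_os is used */
    if (unz == NULL)
        return UNZ_ERRNO;
    if (unzGoToFirstFile(unz) != UNZ_OK || unzOpenCurrentFile(unz) != UNZ_OK) {
        unzClose(unz);
        return UNZ_ERRNO;
    }
    /* unzReadCurrentFile returns bytes read, 0 at end of entry, <0 on error */
    while ((bytes = unzReadCurrentFile(unz, buf, sizeof(buf))) > 0)
        fwrite(buf, 1, (size_t)bytes, stdout);
    unzCloseCurrentFile(unz);
    unzClose(unz);
    return (bytes < 0) ? bytes : UNZ_OK;
}

A caller such as dump_first_entry("archive.zip") would behave the same as with the classic minizip ioapi, which is the point of the compatibility layer this patch vendors.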
+/***************************************************************************/ + +#if MZ_COMPAT_VERSION <= 110 +#define mz_dos_date dosDate +#else +#define mz_dos_date dos_date +#endif + +typedef struct tm tm_unz; +typedef struct tm tm_zip; + +typedef struct { + uint32_t mz_dos_date; + struct tm tmz_date; + uint16_t internal_fa; /* internal file attributes 2 bytes */ + uint32_t external_fa; /* external file attributes 4 bytes */ +} zip_fileinfo; + +typedef const char *zipcharpc; + +/***************************************************************************/ + +#define ZIP_OK (0) +#define ZIP_EOF (0) +#define ZIP_ERRNO (-1) +#define ZIP_PARAMERROR (-102) +#define ZIP_BADZIPFILE (-103) +#define ZIP_INTERNALERROR (-104) + +#ifndef Z_DEFLATED +#define Z_DEFLATED (8) +#endif +#define Z_BZIP2ED (12) + +#define APPEND_STATUS_CREATE (0) +#define APPEND_STATUS_CREATEAFTER (1) +#define APPEND_STATUS_ADDINZIP (2) + +/***************************************************************************/ +/* Writing a zip file */ + +ZEXPORT zipFile zipOpen(const char *path, int append); +ZEXPORT zipFile zipOpen64(const void *path, int append); +ZEXPORT zipFile zipOpen2(const char *path, int append, const char **globalcomment, + zlib_filefunc_def *pzlib_filefunc_def); + +ZEXPORT zipFile zipOpen2_64(const void *path, int append, const char **globalcomment, + zlib_filefunc64_def *pzlib_filefunc_def); +ZEXPORT zipFile zipOpen_MZ(void *stream, int append, const char **globalcomment); + +ZEXPORT void* zipGetHandle_MZ(zipFile); +ZEXPORT void* zipGetStream_MZ(zipFile file); + +ZEXPORT int zipOpenNewFileInZip(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level); +ZEXPORT int zipOpenNewFileInZip_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int zip64); +ZEXPORT int zipOpenNewFileInZip2(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw); +ZEXPORT int zipOpenNewFileInZip2_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int zip64); +ZEXPORT int zipOpenNewFileInZip3(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting); +ZEXPORT int zipOpenNewFileInZip3_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + uint32_t crc_for_crypting, int 
zip64); +ZEXPORT int zipOpenNewFileInZip4(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting, unsigned long version_madeby, unsigned long flag_base); +ZEXPORT int zipOpenNewFileInZip4_64(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting, unsigned long version_madeby, unsigned long flag_base, int zip64); +ZEXPORT int zipOpenNewFileInZip5(zipFile file, const char *filename, const zip_fileinfo *zipfi, + const void *extrafield_local, uint16_t size_extrafield_local, const void *extrafield_global, + uint16_t size_extrafield_global, const char *comment, int compression_method, int level, + int raw, int windowBits, int memLevel, int strategy, const char *password, + unsigned long crc_for_crypting, unsigned long version_madeby, unsigned long flag_base, int zip64); + +ZEXPORT int zipWriteInFileInZip(zipFile file, const void *buf, uint32_t len); + +ZEXPORT int zipCloseFileInZipRaw(zipFile file, unsigned long uncompressed_size, unsigned long crc32); +ZEXPORT int zipCloseFileInZipRaw64(zipFile file, uint64_t uncompressed_size, unsigned long crc32); +ZEXPORT int zipCloseFileInZip(zipFile file); +ZEXPORT int zipCloseFileInZip64(zipFile file); + +ZEXPORT int zipClose(zipFile file, const char *global_comment); +ZEXPORT int zipClose_64(zipFile file, const char *global_comment); +ZEXPORT int zipClose2_64(zipFile file, const char *global_comment, uint16_t version_madeby); + int zipClose_MZ(zipFile file, const char *global_comment); + int zipClose2_MZ(zipFile file, const char *global_comment, uint16_t version_madeby); + +/***************************************************************************/ + +#if defined(STRICTUNZIP) || defined(STRICTZIPUNZIP) +/* like the STRICT of WIN32, we define a pointer that cannot be converted + from (void*) without cast */ +typedef struct TagunzFile__ { int unused; } unz_file__; +typedef unz_file__ *unzFile; +#else +typedef void *unzFile; +#endif + +/***************************************************************************/ + +#define UNZ_OK (0) +#define UNZ_END_OF_LIST_OF_FILE (-100) +#define UNZ_ERRNO (-1) +#define UNZ_EOF (0) +#define UNZ_PARAMERROR (-102) +#define UNZ_BADZIPFILE (-103) +#define UNZ_INTERNALERROR (-104) +#define UNZ_CRCERROR (-105) +#define UNZ_BADPASSWORD (-106) + +/***************************************************************************/ + +typedef struct unz_global_info64_s { + uint64_t number_entry; /* total number of entries in the central dir on this disk */ + uint32_t number_disk_with_CD; /* number the the disk with central dir, used for spanning ZIP */ + uint16_t size_comment; /* size of the global comment of the zipfile */ +} unz_global_info64; + +typedef struct unz_global_info_s { + uint32_t number_entry; /* total number of entries in the central dir on this disk */ + uint32_t number_disk_with_CD; /* number the the disk with central dir, used for spanning ZIP */ + uint16_t size_comment; /* size of the global comment of the zipfile */ +} 
unz_global_info; + +typedef struct unz_file_info64_s { + uint16_t version; /* version made by 2 bytes */ + uint16_t version_needed; /* version needed to extract 2 bytes */ + uint16_t flag; /* general purpose bit flag 2 bytes */ + uint16_t compression_method; /* compression method 2 bytes */ + uint32_t mz_dos_date; /* last mod file date in Dos fmt 4 bytes */ + struct tm tmu_date; + uint32_t crc; /* crc-32 4 bytes */ + uint64_t compressed_size; /* compressed size 8 bytes */ + uint64_t uncompressed_size; /* uncompressed size 8 bytes */ + uint16_t size_filename; /* filename length 2 bytes */ + uint16_t size_file_extra; /* extra field length 2 bytes */ + uint16_t size_file_comment; /* file comment length 2 bytes */ + + uint32_t disk_num_start; /* disk number start 4 bytes */ + uint16_t internal_fa; /* internal file attributes 2 bytes */ + uint32_t external_fa; /* external file attributes 4 bytes */ + + uint64_t disk_offset; + + uint16_t size_file_extra_internal; +} unz_file_info64; + +typedef struct unz_file_info_s { + uint16_t version; /* version made by 2 bytes */ + uint16_t version_needed; /* version needed to extract 2 bytes */ + uint16_t flag; /* general purpose bit flag 2 bytes */ + uint16_t compression_method; /* compression method 2 bytes */ + uint32_t mz_dos_date; /* last mod file date in Dos fmt 4 bytes */ + struct tm tmu_date; + uint32_t crc; /* crc-32 4 bytes */ + uint32_t compressed_size; /* compressed size 4 bytes */ + uint32_t uncompressed_size; /* uncompressed size 4 bytes */ + uint16_t size_filename; /* filename length 2 bytes */ + uint16_t size_file_extra; /* extra field length 2 bytes */ + uint16_t size_file_comment; /* file comment length 2 bytes */ + + uint16_t disk_num_start; /* disk number start 2 bytes */ + uint16_t internal_fa; /* internal file attributes 2 bytes */ + uint32_t external_fa; /* external file attributes 4 bytes */ + + uint64_t disk_offset; +} unz_file_info; + +/***************************************************************************/ + +typedef int (*unzFileNameComparer)(unzFile file, const char *filename1, const char *filename2); +typedef int (*unzIteratorFunction)(unzFile file); +typedef int (*unzIteratorFunction2)(unzFile file, unz_file_info64 *pfile_info, char *filename, + uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, + uint16_t comment_size); + +/***************************************************************************/ +/* Reading a zip file */ + +ZEXPORT unzFile unzOpen(const char *path); +ZEXPORT unzFile unzOpen64(const void *path); +ZEXPORT unzFile unzOpen2(const char *path, zlib_filefunc_def *pzlib_filefunc_def); +ZEXPORT unzFile unzOpen2_64(const void *path, zlib_filefunc64_def *pzlib_filefunc_def); + unzFile unzOpen_MZ(void *stream); + +ZEXPORT int unzClose(unzFile file); +ZEXPORT int unzClose_MZ(unzFile file); + +ZEXPORT void* unzGetHandle_MZ(unzFile file); +ZEXPORT void* unzGetStream_MZ(zipFile file); + +ZEXPORT int unzGetGlobalInfo(unzFile file, unz_global_info* pglobal_info32); +ZEXPORT int unzGetGlobalInfo64(unzFile file, unz_global_info64 *pglobal_info); +ZEXPORT int unzGetGlobalComment(unzFile file, char *comment, unsigned long comment_size); + +ZEXPORT int unzOpenCurrentFile(unzFile file); +ZEXPORT int unzOpenCurrentFilePassword(unzFile file, const char *password); +ZEXPORT int unzOpenCurrentFile2(unzFile file, int *method, int *level, int raw); +ZEXPORT int unzOpenCurrentFile3(unzFile file, int *method, int *level, int raw, const char *password); +ZEXPORT int unzReadCurrentFile(unzFile file, 
void *buf, uint32_t len); +ZEXPORT int unzCloseCurrentFile(unzFile file); + +ZEXPORT int unzGetCurrentFileInfo(unzFile file, unz_file_info *pfile_info, char *filename, + unsigned long filename_size, void *extrafield, unsigned long extrafield_size, char *comment, + unsigned long comment_size); +ZEXPORT int unzGetCurrentFileInfo64(unzFile file, unz_file_info64 * pfile_info, char *filename, + unsigned long filename_size, void *extrafield, unsigned long extrafield_size, char *comment, + unsigned long comment_size); + +ZEXPORT int unzGoToFirstFile(unzFile file); +ZEXPORT int unzGoToNextFile(unzFile file); +ZEXPORT int unzLocateFile(unzFile file, const char *filename, unzFileNameComparer filename_compare_func); + +ZEXPORT int unzGetLocalExtrafield(unzFile file, void *buf, unsigned int len); + +/***************************************************************************/ +/* Raw access to zip file */ + +typedef struct unz_file_pos_s { + uint32_t pos_in_zip_directory; /* offset in zip file directory */ + uint32_t num_of_file; /* # of file */ +} unz_file_pos; + +ZEXPORT int unzGetFilePos(unzFile file, unz_file_pos *file_pos); +ZEXPORT int unzGoToFilePos(unzFile file, unz_file_pos *file_pos); + +typedef struct unz64_file_pos_s { + int64_t pos_in_zip_directory; /* offset in zip file directory */ + uint64_t num_of_file; /* # of file */ +} unz64_file_pos; + +ZEXPORT int unzGetFilePos64(unzFile file, unz64_file_pos *file_pos); +ZEXPORT int unzGoToFilePos64(unzFile file, const unz64_file_pos *file_pos); + +ZEXPORT int64_t unzGetOffset64(unzFile file); +ZEXPORT unsigned long + unzGetOffset(unzFile file); +ZEXPORT int unzSetOffset64(unzFile file, int64_t pos); +ZEXPORT int unzSetOffset(unzFile file, unsigned long pos); +ZEXPORT int32_t unztell(unzFile file); +ZEXPORT int32_t unzTell(unzFile file); +ZEXPORT uint64_t unztell64(unzFile file); +ZEXPORT uint64_t unzTell64(unzFile file); +ZEXPORT int unzSeek(unzFile file, int32_t offset, int origin); +ZEXPORT int unzSeek64(unzFile file, int64_t offset, int origin); +ZEXPORT int unzEndOfFile(unzFile file); +ZEXPORT int unzeof(unzFile file); +ZEXPORT void* unzGetStream(unzFile file); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_crypt.c b/Externals/minizip/mz_crypt.c new file mode 100644 index 000000000000..dd2cd427c86c --- /dev/null +++ b/Externals/minizip/mz_crypt.c @@ -0,0 +1,196 @@ +/* mz_crypt.c -- Crypto/hash functions + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + + +#include "mz.h" +#include "mz_os.h" +#include "mz_crypt.h" + +#if defined(HAVE_ZLIB) +# include "zlib.h" +# if defined(ZLIBNG_VERNUM) && !defined(ZLIB_COMPAT) +# include "zlib-ng.h" +# endif +#elif defined(HAVE_LZMA) +# include "lzma.h" +#endif + +/***************************************************************************/ +/* Define z_crc_t in zlib 1.2.5 and less or if using zlib-ng */ + +#if defined(HAVE_ZLIB) && defined(ZLIBNG_VERNUM) +# if defined(ZLIB_COMPAT) +# define ZLIB_PREFIX(x) x +# else +# define ZLIB_PREFIX(x) zng_ ## x +# endif + typedef uint32_t z_crc_t; +#elif defined(HAVE_ZLIB) +# define ZLIB_PREFIX(x) x +# if (ZLIB_VERNUM < 0x1270) + typedef unsigned long z_crc_t; +# endif +#endif + +/***************************************************************************/ + +#if defined(MZ_ZIP_NO_CRYPTO) +int32_t mz_crypt_rand(uint8_t *buf, int32_t size) { + return mz_os_rand(buf, size); +} +#endif + +uint32_t mz_crypt_crc32_update(uint32_t value, const uint8_t *buf, int32_t size) { +#if defined(HAVE_ZLIB) + return (uint32_t)ZLIB_PREFIX(crc32)((z_crc_t)value, buf, (uInt)size); +#elif defined(HAVE_LZMA) + return (uint32_t)lzma_crc32(buf, (size_t)size, (uint32_t)value); +#else + static uint32_t crc32_table[256] = { + 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, + 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, + 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, + 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, + 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, + 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, + 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, + 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, + 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, + 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, + 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433, + 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, + 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, + 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, + 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, + 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, + 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, + 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, + 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, + 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, + 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, + 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, + 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, + 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, + 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, + 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, + 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, + 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, + 
0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, + 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, + 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, + 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, + 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, + 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, + 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, + 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, + 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, + 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, + 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d + }; + value = ~value; + + while (size > 0) { + value = (value >> 8) ^ crc32_table[(value ^ *buf) & 0xFF]; + + buf += 1; + size -= 1; + } + + return ~value; +#endif +} + +#if defined(HAVE_WZAES) +int32_t mz_crypt_pbkdf2(uint8_t *password, int32_t password_length, uint8_t *salt, + int32_t salt_length, int32_t iteration_count, uint8_t *key, int32_t key_length) { + void *hmac1 = NULL; + void *hmac2 = NULL; + void *hmac3 = NULL; + int32_t err = MZ_OK; + uint16_t i = 0; + uint16_t j = 0; + uint16_t k = 0; + uint16_t block_count = 0; + uint8_t uu[MZ_HASH_SHA1_SIZE]; + uint8_t ux[MZ_HASH_SHA1_SIZE]; + + if (password == NULL || salt == NULL || key == NULL) + return MZ_PARAM_ERROR; + + memset(key, 0, key_length); + + mz_crypt_hmac_create(&hmac1); + mz_crypt_hmac_create(&hmac2); + mz_crypt_hmac_create(&hmac3); + + mz_crypt_hmac_set_algorithm(hmac1, MZ_HASH_SHA1); + mz_crypt_hmac_set_algorithm(hmac2, MZ_HASH_SHA1); + mz_crypt_hmac_set_algorithm(hmac3, MZ_HASH_SHA1); + + err = mz_crypt_hmac_init(hmac1, password, password_length); + if (err == MZ_OK) + err = mz_crypt_hmac_init(hmac2, password, password_length); + if (err == MZ_OK) + err = mz_crypt_hmac_update(hmac2, salt, salt_length); + + block_count = 1 + ((uint16_t)key_length - 1) / MZ_HASH_SHA1_SIZE; + + for (i = 0; (err == MZ_OK) && (i < block_count); i += 1) { + memset(ux, 0, sizeof(ux)); + + err = mz_crypt_hmac_copy(hmac2, hmac3); + if (err != MZ_OK) + break; + + uu[0] = (uint8_t)((i + 1) >> 24); + uu[1] = (uint8_t)((i + 1) >> 16); + uu[2] = (uint8_t)((i + 1) >> 8); + uu[3] = (uint8_t)(i + 1); + + for (j = 0, k = 4; j < iteration_count; j += 1) { + err = mz_crypt_hmac_update(hmac3, uu, k); + if (err == MZ_OK) + err = mz_crypt_hmac_end(hmac3, uu, sizeof(uu)); + if (err != MZ_OK) + break; + + for(k = 0; k < MZ_HASH_SHA1_SIZE; k += 1) + ux[k] ^= uu[k]; + + err = mz_crypt_hmac_copy(hmac1, hmac3); + if (err != MZ_OK) + break; + } + + if (err != MZ_OK) + break; + + j = 0; + k = i * MZ_HASH_SHA1_SIZE; + + while (j < MZ_HASH_SHA1_SIZE && k < key_length) + key[k++] = ux[j++]; + } + + /* hmac3 uses the same provider as hmac2, so it must be deleted + before the context is destroyed. 
*/ + mz_crypt_hmac_delete(&hmac3); + mz_crypt_hmac_delete(&hmac1); + mz_crypt_hmac_delete(&hmac2); + + return err; +} +#endif + +/***************************************************************************/ diff --git a/Externals/minizip/mz_crypt.h b/Externals/minizip/mz_crypt.h new file mode 100644 index 000000000000..59a193c02b79 --- /dev/null +++ b/Externals/minizip/mz_crypt.h @@ -0,0 +1,65 @@ +/* mz_crypt.h -- Crypto/hash functions + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_CRYPT_H +#define MZ_CRYPT_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +uint32_t mz_crypt_crc32_update(uint32_t value, const uint8_t *buf, int32_t size); + +int32_t mz_crypt_pbkdf2(uint8_t *password, int32_t password_length, uint8_t *salt, + int32_t salt_length, int32_t iteration_count, uint8_t *key, int32_t key_length); + +/***************************************************************************/ + +int32_t mz_crypt_rand(uint8_t *buf, int32_t size); + +void mz_crypt_sha_reset(void *handle); +int32_t mz_crypt_sha_begin(void *handle); +int32_t mz_crypt_sha_update(void *handle, const void *buf, int32_t size); +int32_t mz_crypt_sha_end(void *handle, uint8_t *digest, int32_t digest_size); +void mz_crypt_sha_set_algorithm(void *handle, uint16_t algorithm); +void* mz_crypt_sha_create(void **handle); +void mz_crypt_sha_delete(void **handle); + +void mz_crypt_aes_reset(void *handle); +int32_t mz_crypt_aes_encrypt(void *handle, uint8_t *buf, int32_t size); +int32_t mz_crypt_aes_decrypt(void *handle, uint8_t *buf, int32_t size); +int32_t mz_crypt_aes_set_encrypt_key(void *handle, const void *key, int32_t key_length); +int32_t mz_crypt_aes_set_decrypt_key(void *handle, const void *key, int32_t key_length); +void mz_crypt_aes_set_mode(void *handle, int32_t mode); +void* mz_crypt_aes_create(void **handle); +void mz_crypt_aes_delete(void **handle); + +void mz_crypt_hmac_reset(void *handle); +int32_t mz_crypt_hmac_init(void *handle, const void *key, int32_t key_length); +int32_t mz_crypt_hmac_update(void *handle, const void *buf, int32_t size); +int32_t mz_crypt_hmac_end(void *handle, uint8_t *digest, int32_t digest_size); +int32_t mz_crypt_hmac_copy(void *src_handle, void *target_handle); +void mz_crypt_hmac_set_algorithm(void *handle, uint16_t algorithm); +void* mz_crypt_hmac_create(void **handle); +void mz_crypt_hmac_delete(void **handle); + +int32_t mz_crypt_sign(uint8_t *message, int32_t message_size, uint8_t *cert_data, int32_t cert_data_size, + const char *cert_pwd, uint8_t **signature, int32_t *signature_size); +int32_t mz_crypt_sign_verify(uint8_t *message, int32_t message_size, uint8_t *signature, int32_t signature_size); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_crypt_apple.c b/Externals/minizip/mz_crypt_apple.c new file mode 100644 index 000000000000..4519753fa4b8 --- /dev/null +++ b/Externals/minizip/mz_crypt_apple.c @@ -0,0 +1,487 @@ +/* mz_crypt_apple.c -- Crypto/hash functions for Apple + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. 
+ See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" + +#include +#include +#include +#include +#include +#include + +/***************************************************************************/ + +int32_t mz_crypt_rand(uint8_t *buf, int32_t size) { + if (SecRandomCopyBytes(kSecRandomDefault, size, buf) != errSecSuccess) + return 0; + return size; +} + +/***************************************************************************/ + +typedef struct mz_crypt_sha_s { + CC_SHA1_CTX ctx1; + CC_SHA256_CTX ctx256; + int32_t error; + int32_t initialized; + uint16_t algorithm; +} mz_crypt_sha; + +/***************************************************************************/ + +void mz_crypt_sha_reset(void *handle) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + + sha->error = 0; + sha->initialized = 0; +} + +int32_t mz_crypt_sha_begin(void *handle) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + + if (sha == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_sha_reset(handle); + + if (sha->algorithm == MZ_HASH_SHA1) + sha->error = CC_SHA1_Init(&sha->ctx1); + else if (sha->algorithm == MZ_HASH_SHA256) + sha->error = CC_SHA256_Init(&sha->ctx256); + else + return MZ_PARAM_ERROR; + + if (!sha->error) + return MZ_HASH_ERROR; + + sha->initialized = 1; + return MZ_OK; +} + +int32_t mz_crypt_sha_update(void *handle, const void *buf, int32_t size) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + + if (sha == NULL || buf == NULL || !sha->initialized) + return MZ_PARAM_ERROR; + + if (sha->algorithm == MZ_HASH_SHA1) + sha->error = CC_SHA1_Update(&sha->ctx1, buf, size); + else + sha->error = CC_SHA256_Update(&sha->ctx256, buf, size); + + if (!sha->error) + return MZ_HASH_ERROR; + + return size; +} + +int32_t mz_crypt_sha_end(void *handle, uint8_t *digest, int32_t digest_size) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + + if (sha == NULL || digest == NULL || !sha->initialized) + return MZ_PARAM_ERROR; + + if (sha->algorithm == MZ_HASH_SHA1) { + if (digest_size < MZ_HASH_SHA1_SIZE) + return MZ_BUF_ERROR; + sha->error = CC_SHA1_Final(digest, &sha->ctx1); + } else { + if (digest_size < MZ_HASH_SHA256_SIZE) + return MZ_BUF_ERROR; + sha->error = CC_SHA256_Final(digest, &sha->ctx256); + } + + if (!sha->error) + return MZ_HASH_ERROR; + + return MZ_OK; +} + +void mz_crypt_sha_set_algorithm(void *handle, uint16_t algorithm) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + sha->algorithm = algorithm; +} + +void *mz_crypt_sha_create(void **handle) { + mz_crypt_sha *sha = NULL; + + sha = (mz_crypt_sha *)MZ_ALLOC(sizeof(mz_crypt_sha)); + if (sha != NULL) { + memset(sha, 0, sizeof(mz_crypt_sha)); + sha->algorithm = MZ_HASH_SHA256; + } + if (handle != NULL) + *handle = sha; + + return sha; +} + +void mz_crypt_sha_delete(void **handle) { + mz_crypt_sha *sha = NULL; + if (handle == NULL) + return; + sha = (mz_crypt_sha *)*handle; + if (sha != NULL) { + mz_crypt_sha_reset(*handle); + MZ_FREE(sha); + } + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_crypt_aes_s { + CCCryptorRef crypt; + int32_t mode; + int32_t error; +} mz_crypt_aes; + +/***************************************************************************/ + +void mz_crypt_aes_reset(void *handle) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + + if (aes->crypt != NULL) + CCCryptorRelease(aes->crypt); + aes->crypt = NULL; +} + +int32_t mz_crypt_aes_encrypt(void *handle, uint8_t *buf, int32_t size) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + size_t 
data_moved = 0; + + if (aes == NULL || buf == NULL) + return MZ_PARAM_ERROR; + if (size != MZ_AES_BLOCK_SIZE) + return MZ_PARAM_ERROR; + + aes->error = CCCryptorUpdate(aes->crypt, buf, size, buf, size, &data_moved); + + if (aes->error != kCCSuccess) + return MZ_HASH_ERROR; + + return size; +} + +int32_t mz_crypt_aes_decrypt(void *handle, uint8_t *buf, int32_t size) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + size_t data_moved = 0; + + if (aes == NULL || buf == NULL) + return MZ_PARAM_ERROR; + if (size != MZ_AES_BLOCK_SIZE) + return MZ_PARAM_ERROR; + + aes->error = CCCryptorUpdate(aes->crypt, buf, size, buf, size, &data_moved); + + if (aes->error != kCCSuccess) + return MZ_HASH_ERROR; + + return size; +} + +int32_t mz_crypt_aes_set_encrypt_key(void *handle, const void *key, int32_t key_length) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + + + if (aes == NULL || key == NULL || key_length == 0) + return MZ_PARAM_ERROR; + + mz_crypt_aes_reset(handle); + + aes->error = CCCryptorCreate(kCCEncrypt, kCCAlgorithmAES, kCCOptionECBMode, + key, key_length, NULL, &aes->crypt); + + if (aes->error != kCCSuccess) + return MZ_HASH_ERROR; + + return MZ_OK; +} + +int32_t mz_crypt_aes_set_decrypt_key(void *handle, const void *key, int32_t key_length) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + + + if (aes == NULL || key == NULL || key_length == 0) + return MZ_PARAM_ERROR; + + mz_crypt_aes_reset(handle); + + aes->error = CCCryptorCreate(kCCDecrypt, kCCAlgorithmAES, kCCOptionECBMode, + key, key_length, NULL, &aes->crypt); + + if (aes->error != kCCSuccess) + return MZ_HASH_ERROR; + + return MZ_OK; +} + +void mz_crypt_aes_set_mode(void *handle, int32_t mode) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + aes->mode = mode; +} + +void *mz_crypt_aes_create(void **handle) { + mz_crypt_aes *aes = NULL; + + aes = (mz_crypt_aes *)MZ_ALLOC(sizeof(mz_crypt_aes)); + if (aes != NULL) + memset(aes, 0, sizeof(mz_crypt_aes)); + if (handle != NULL) + *handle = aes; + + return aes; +} + +void mz_crypt_aes_delete(void **handle) { + mz_crypt_aes *aes = NULL; + if (handle == NULL) + return; + aes = (mz_crypt_aes *)*handle; + if (aes != NULL) { + mz_crypt_aes_reset(*handle); + MZ_FREE(aes); + } + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_crypt_hmac_s { + CCHmacContext ctx; + int32_t initialized; + int32_t error; + uint16_t algorithm; +} mz_crypt_hmac; + +/***************************************************************************/ + +static void mz_crypt_hmac_free(void *handle) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + memset(&hmac->ctx, 0, sizeof(hmac->ctx)); +} + +void mz_crypt_hmac_reset(void *handle) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + mz_crypt_hmac_free(handle); + hmac->error = 0; +} + +int32_t mz_crypt_hmac_init(void *handle, const void *key, int32_t key_length) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + CCHmacAlgorithm algorithm = 0; + + if (hmac == NULL || key == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_hmac_reset(handle); + + if (hmac->algorithm == MZ_HASH_SHA1) + algorithm = kCCHmacAlgSHA1; + else if (hmac->algorithm == MZ_HASH_SHA256) + algorithm = kCCHmacAlgSHA256; + else + return MZ_PARAM_ERROR; + + CCHmacInit(&hmac->ctx, algorithm, key, key_length); + return MZ_OK; +} + +int32_t mz_crypt_hmac_update(void *handle, const void *buf, int32_t size) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + + if (hmac == NULL || buf == NULL) + return MZ_PARAM_ERROR; + + CCHmacUpdate(&hmac->ctx, 
buf, size); + return MZ_OK; +} + +int32_t mz_crypt_hmac_end(void *handle, uint8_t *digest, int32_t digest_size) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + + if (hmac == NULL || digest == NULL) + return MZ_PARAM_ERROR; + + if (hmac->algorithm == MZ_HASH_SHA1) { + if (digest_size < MZ_HASH_SHA1_SIZE) + return MZ_BUF_ERROR; + CCHmacFinal(&hmac->ctx, digest); + } else { + if (digest_size < MZ_HASH_SHA256_SIZE) + return MZ_BUF_ERROR; + CCHmacFinal(&hmac->ctx, digest); + } + + return MZ_OK; +} + +void mz_crypt_hmac_set_algorithm(void *handle, uint16_t algorithm) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + hmac->algorithm = algorithm; +} + +int32_t mz_crypt_hmac_copy(void *src_handle, void *target_handle) { + mz_crypt_hmac *source = (mz_crypt_hmac *)src_handle; + mz_crypt_hmac *target = (mz_crypt_hmac *)target_handle; + + if (source == NULL || target == NULL) + return MZ_PARAM_ERROR; + + memcpy(&target->ctx, &source->ctx, sizeof(CCHmacContext)); + return MZ_OK; +} + +void *mz_crypt_hmac_create(void **handle) { + mz_crypt_hmac *hmac = NULL; + + hmac = (mz_crypt_hmac *)MZ_ALLOC(sizeof(mz_crypt_hmac)); + if (hmac != NULL) { + memset(hmac, 0, sizeof(mz_crypt_hmac)); + hmac->algorithm = MZ_HASH_SHA256; + } + if (handle != NULL) + *handle = hmac; + + return hmac; +} + +void mz_crypt_hmac_delete(void **handle) { + mz_crypt_hmac *hmac = NULL; + if (handle == NULL) + return; + hmac = (mz_crypt_hmac *)*handle; + if (hmac != NULL) { + mz_crypt_hmac_free(*handle); + MZ_FREE(hmac); + } + *handle = NULL; +} + +/***************************************************************************/ + +#if defined(MZ_ZIP_SIGNING) +int32_t mz_crypt_sign(uint8_t *message, int32_t message_size, uint8_t *cert_data, int32_t cert_data_size, + const char *cert_pwd, uint8_t **signature, int32_t *signature_size) { + CFStringRef password_ref = NULL; + CFDictionaryRef options_dict = NULL; + CFDictionaryRef identity_trust = NULL; + CFDataRef signature_out = NULL; + CFDataRef pkcs12_data = NULL; + CFArrayRef items = 0; + SecIdentityRef identity = NULL; + SecTrustRef trust = NULL; + OSStatus status = noErr; + const void *options_key[2] = { kSecImportExportPassphrase, kSecReturnRef }; + const void *options_values[2] = { 0, kCFBooleanTrue }; + int32_t err = MZ_SIGN_ERROR; + + + if (message == NULL || cert_data == NULL || signature == NULL || signature_size == NULL) + return MZ_PARAM_ERROR; + + *signature = NULL; + *signature_size = 0; + + password_ref = CFStringCreateWithCString(0, cert_pwd, kCFStringEncodingUTF8); + options_values[0] = password_ref; + + options_dict = CFDictionaryCreate(0, options_key, options_values, 2, 0, 0); + if (options_dict) + pkcs12_data = CFDataCreate(0, cert_data, cert_data_size); + if (pkcs12_data) + status = SecPKCS12Import(pkcs12_data, options_dict, &items); + if (status == noErr) + identity_trust = CFArrayGetValueAtIndex(items, 0); + if (identity_trust) + identity = (SecIdentityRef)CFDictionaryGetValue(identity_trust, kSecImportItemIdentity); + if (identity) + trust = (SecTrustRef)CFDictionaryGetValue(identity_trust, kSecImportItemTrust); + if (trust) { + status = CMSEncodeContent(identity, NULL, NULL, FALSE, 0, message, message_size, &signature_out); + + if (status == errSecSuccess) { + *signature_size = CFDataGetLength(signature_out); + *signature = (uint8_t *)MZ_ALLOC(*signature_size); + + memcpy(*signature, CFDataGetBytePtr(signature_out), *signature_size); + + err = MZ_OK; + } + } + + if (signature_out) + CFRelease(signature_out); + if (items) + CFRelease(items); + if (pkcs12_data) + 
CFRelease(pkcs12_data); + if (options_dict) + CFRelease(options_dict); + if (password_ref) + CFRelease(password_ref); + + return err; +} + +int32_t mz_crypt_sign_verify(uint8_t *message, int32_t message_size, uint8_t *signature, int32_t signature_size) { + CMSDecoderRef decoder = NULL; + CMSSignerStatus signer_status = 0; + CFDataRef message_out = NULL; + SecPolicyRef trust_policy = NULL; + OSStatus status = noErr; + OSStatus verify_status = noErr; + size_t signer_count = 0; + size_t i = 0; + int32_t err = MZ_SIGN_ERROR; + + if (message == NULL || signature == NULL) + return MZ_PARAM_ERROR; + + status = CMSDecoderCreate(&decoder); + if (status == errSecSuccess) + status = CMSDecoderUpdateMessage(decoder, signature, signature_size); + if (status == errSecSuccess) + status = CMSDecoderFinalizeMessage(decoder); + if (status == errSecSuccess) + trust_policy = SecPolicyCreateBasicX509(); + + if (status == errSecSuccess && trust_policy) { + CMSDecoderGetNumSigners(decoder, &signer_count); + if (signer_count > 0) + err = MZ_OK; + for (i = 0; i < signer_count; i += 1) { + status = CMSDecoderCopySignerStatus(decoder, i, trust_policy, TRUE, &signer_status, NULL, &verify_status); + if (status != errSecSuccess || verify_status != 0 || signer_status != kCMSSignerValid) { + err = MZ_SIGN_ERROR; + break; + } + } + } + + if (err == MZ_OK) { + status = CMSDecoderCopyContent(decoder, &message_out); + if ((status != errSecSuccess) || + (CFDataGetLength(message_out) != message_size) || + (memcmp(message, CFDataGetBytePtr(message_out), message_size) != 0)) + err = MZ_SIGN_ERROR; + } + + if (trust_policy) + CFRelease(trust_policy); + if (decoder) + CFRelease(decoder); + + return err; +} + +#endif diff --git a/Externals/minizip/mz_crypt_openssl.c b/Externals/minizip/mz_crypt_openssl.c new file mode 100644 index 000000000000..2a6a4ac8750f --- /dev/null +++ b/Externals/minizip/mz_crypt_openssl.c @@ -0,0 +1,638 @@ +/* mz_crypt_openssl.c -- Crypto/hash functions for OpenSSL + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(MZ_ZIP_SIGNING) +/* Note: https://www.imperialviolet.org/2015/10/17/boringssl.html says that + BoringSSL does not support CMS. "#include " will fail. 
See + https://bugs.chromium.org/p/boringssl/issues/detail?id=421 +*/ +#include +#include +#include +#endif + +/***************************************************************************/ + +static void mz_crypt_init(void) { + static int32_t openssl_initialized = 0; + if (openssl_initialized == 0) { + OpenSSL_add_all_algorithms(); + + ERR_load_BIO_strings(); + ERR_load_crypto_strings(); + + ENGINE_load_builtin_engines(); + ENGINE_register_all_complete(); + + openssl_initialized = 1; + } +} + +int32_t mz_crypt_rand(uint8_t *buf, int32_t size) { + int32_t result = 0; + + result = RAND_bytes(buf, size); + + if (!result) + return MZ_CRYPT_ERROR; + + return size; +} + +/***************************************************************************/ + +typedef struct mz_crypt_sha_s { + SHA256_CTX ctx256; + SHA_CTX ctx1; + int32_t initialized; + int32_t error; + uint16_t algorithm; +} mz_crypt_sha; + +/***************************************************************************/ + +void mz_crypt_sha_reset(void *handle) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + + sha->error = 0; + sha->initialized = 0; + + mz_crypt_init(); +} + +int32_t mz_crypt_sha_begin(void *handle) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + int32_t result = 0; + + + if (sha == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_sha_reset(handle); + + if (sha->algorithm == MZ_HASH_SHA1) + result = SHA1_Init(&sha->ctx1); + else + result = SHA256_Init(&sha->ctx256); + + if (!result) { + sha->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + sha->initialized = 1; + return MZ_OK; +} + +int32_t mz_crypt_sha_update(void *handle, const void *buf, int32_t size) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + int32_t result = 0; + + if (sha == NULL || buf == NULL || !sha->initialized) + return MZ_PARAM_ERROR; + + if (sha->algorithm == MZ_HASH_SHA1) + result = SHA1_Update(&sha->ctx1, buf, size); + else + result = SHA256_Update(&sha->ctx256, buf, size); + + if (!result) { + sha->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return size; +} + +int32_t mz_crypt_sha_end(void *handle, uint8_t *digest, int32_t digest_size) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + int32_t result = 0; + + if (sha == NULL || digest == NULL || !sha->initialized) + return MZ_PARAM_ERROR; + + if (sha->algorithm == MZ_HASH_SHA1) { + if (digest_size < MZ_HASH_SHA1_SIZE) + return MZ_BUF_ERROR; + result = SHA1_Final(digest, &sha->ctx1); + } else { + if (digest_size < MZ_HASH_SHA256_SIZE) + return MZ_BUF_ERROR; + result = SHA256_Final(digest, &sha->ctx256); + } + + if (!result) { + sha->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +void mz_crypt_sha_set_algorithm(void *handle, uint16_t algorithm) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + sha->algorithm = algorithm; +} + +void *mz_crypt_sha_create(void **handle) { + mz_crypt_sha *sha = NULL; + + sha = (mz_crypt_sha *)MZ_ALLOC(sizeof(mz_crypt_sha)); + if (sha != NULL) { + memset(sha, 0, sizeof(mz_crypt_sha)); + sha->algorithm = MZ_HASH_SHA256; + } + if (handle != NULL) + *handle = sha; + + return sha; +} + +void mz_crypt_sha_delete(void **handle) { + mz_crypt_sha *sha = NULL; + if (handle == NULL) + return; + sha = (mz_crypt_sha *)*handle; + if (sha != NULL) { + mz_crypt_sha_reset(*handle); + MZ_FREE(sha); + } + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_crypt_aes_s { + AES_KEY key; + int32_t mode; + int32_t error; + uint8_t *key_copy; + int32_t key_length; +} mz_crypt_aes; + 
+/***************************************************************************/ + +void mz_crypt_aes_reset(void *handle) { + MZ_UNUSED(handle); + + mz_crypt_init(); +} + +int32_t mz_crypt_aes_encrypt(void *handle, uint8_t *buf, int32_t size) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + + if (aes == NULL || buf == NULL) + return MZ_PARAM_ERROR; + if (size != MZ_AES_BLOCK_SIZE) + return MZ_PARAM_ERROR; + + AES_encrypt(buf, buf, &aes->key); + /* Equivalent to AES_ecb_encrypt with AES_ENCRYPT */ + return size; +} + +int32_t mz_crypt_aes_decrypt(void *handle, uint8_t *buf, int32_t size) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + if (aes == NULL || buf == NULL) + return MZ_PARAM_ERROR; + if (size != MZ_AES_BLOCK_SIZE) + return MZ_PARAM_ERROR; + + AES_decrypt(buf, buf, &aes->key); + /* Equivalent to AES_ecb_encrypt with AES_DECRYPT */ + return size; +} + +int32_t mz_crypt_aes_set_encrypt_key(void *handle, const void *key, int32_t key_length) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + int32_t result = 0; + int32_t key_bits = 0; + + + if (aes == NULL || key == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_aes_reset(handle); + + key_bits = key_length * 8; + result = AES_set_encrypt_key(key, key_bits, &aes->key); + if (result) { + aes->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +int32_t mz_crypt_aes_set_decrypt_key(void *handle, const void *key, int32_t key_length) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + int32_t result = 0; + int32_t key_bits = 0; + + + if (aes == NULL || key == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_aes_reset(handle); + + key_bits = key_length * 8; + result = AES_set_decrypt_key(key, key_bits, &aes->key); + if (result) { + aes->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +void mz_crypt_aes_set_mode(void *handle, int32_t mode) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + aes->mode = mode; +} + +void *mz_crypt_aes_create(void **handle) { + mz_crypt_aes *aes = NULL; + + aes = (mz_crypt_aes *)MZ_ALLOC(sizeof(mz_crypt_aes)); + if (aes != NULL) + memset(aes, 0, sizeof(mz_crypt_aes)); + if (handle != NULL) + *handle = aes; + + return aes; +} + +void mz_crypt_aes_delete(void **handle) { + mz_crypt_aes *aes = NULL; + if (handle == NULL) + return; + aes = (mz_crypt_aes *)*handle; + if (aes != NULL) + MZ_FREE(aes); + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_crypt_hmac_s { + HMAC_CTX *ctx; + int32_t initialized; + int32_t error; + uint16_t algorithm; +} mz_crypt_hmac; + +/***************************************************************************/ + +#if (OPENSSL_VERSION_NUMBER < 0x10100000L) || (defined(LIBRESSL_VERSION_NUMBER) && (LIBRESSL_VERSION_NUMBER < 0x2070000fL)) +static HMAC_CTX *HMAC_CTX_new(void) { + HMAC_CTX *ctx = OPENSSL_malloc(sizeof(HMAC_CTX)); + if (ctx != NULL) + HMAC_CTX_init(ctx); + return ctx; +} + +static void HMAC_CTX_free(HMAC_CTX *ctx) { + if (ctx != NULL) { + HMAC_CTX_cleanup(ctx); + OPENSSL_free(ctx); + } +} +#endif + +/***************************************************************************/ + +void mz_crypt_hmac_reset(void *handle) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + + HMAC_CTX_free(hmac->ctx); + + hmac->ctx = NULL; + hmac->error = 0; + + mz_crypt_init(); +} + +int32_t mz_crypt_hmac_init(void *handle, const void *key, int32_t key_length) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + int32_t result = 0; + const EVP_MD *evp_md = NULL; + + if (hmac == NULL || key 
== NULL) + return MZ_PARAM_ERROR; + + mz_crypt_hmac_reset(handle); + + hmac->ctx = HMAC_CTX_new(); + + if (hmac->algorithm == MZ_HASH_SHA1) + evp_md = EVP_sha1(); + else + evp_md = EVP_sha256(); + + result = HMAC_Init_ex(hmac->ctx, key, key_length, evp_md, NULL); + if (!result) { + hmac->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +int32_t mz_crypt_hmac_update(void *handle, const void *buf, int32_t size) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + int32_t result = 0; + + if (hmac == NULL || buf == NULL) + return MZ_PARAM_ERROR; + + result = HMAC_Update(hmac->ctx, buf, size); + if (!result) { + hmac->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +int32_t mz_crypt_hmac_end(void *handle, uint8_t *digest, int32_t digest_size) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + int32_t result = 0; + + if (hmac == NULL || digest == NULL) + return MZ_PARAM_ERROR; + + if (hmac->algorithm == MZ_HASH_SHA1) { + if (digest_size < MZ_HASH_SHA1_SIZE) + return MZ_BUF_ERROR; + + result = HMAC_Final(hmac->ctx, digest, (uint32_t *)&digest_size); + } else { + if (digest_size < MZ_HASH_SHA256_SIZE) + return MZ_BUF_ERROR; + result = HMAC_Final(hmac->ctx, digest, (uint32_t *)&digest_size); + } + + if (!result) { + hmac->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +void mz_crypt_hmac_set_algorithm(void *handle, uint16_t algorithm) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + hmac->algorithm = algorithm; +} + +int32_t mz_crypt_hmac_copy(void *src_handle, void *target_handle) { + mz_crypt_hmac *source = (mz_crypt_hmac *)src_handle; + mz_crypt_hmac *target = (mz_crypt_hmac *)target_handle; + int32_t result = 0; + + if (source == NULL || target == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_hmac_reset(target_handle); + + if (target->ctx == NULL) + target->ctx = HMAC_CTX_new(); + + result = HMAC_CTX_copy(target->ctx, source->ctx); + if (!result) { + target->error = ERR_get_error(); + return MZ_HASH_ERROR; + } + + return MZ_OK; +} + +void *mz_crypt_hmac_create(void **handle) { + mz_crypt_hmac *hmac = NULL; + + hmac = (mz_crypt_hmac *)MZ_ALLOC(sizeof(mz_crypt_hmac)); + if (hmac != NULL) { + memset(hmac, 0, sizeof(mz_crypt_hmac)); + hmac->algorithm = MZ_HASH_SHA256; + } + if (handle != NULL) + *handle = hmac; + + return hmac; +} + +void mz_crypt_hmac_delete(void **handle) { + mz_crypt_hmac *hmac = NULL; + if (handle == NULL) + return; + hmac = (mz_crypt_hmac *)*handle; + if (hmac != NULL) { + mz_crypt_hmac_reset(*handle); + MZ_FREE(hmac); + } + *handle = NULL; +} + +/***************************************************************************/ + +#if defined(MZ_ZIP_SIGNING) +int32_t mz_crypt_sign(uint8_t *message, int32_t message_size, uint8_t *cert_data, int32_t cert_data_size, + const char *cert_pwd, uint8_t **signature, int32_t *signature_size) { + PKCS12 *p12 = NULL; + EVP_PKEY *evp_pkey = NULL; + BUF_MEM *buf_mem = NULL; + BIO *cert_bio = NULL; + BIO *message_bio = NULL; + BIO *signature_bio = NULL; + CMS_ContentInfo *cms = NULL; + CMS_SignerInfo *signer_info = NULL; + STACK_OF(X509) *ca_stack = NULL; + X509 *cert = NULL; + int32_t result = 0; + int32_t err = MZ_OK; + + + if (message == NULL || cert_data == NULL || signature == NULL || signature_size == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_init(); + + *signature = NULL; + *signature_size = 0; + + cert_bio = BIO_new_mem_buf(cert_data, cert_data_size); + + if (d2i_PKCS12_bio(cert_bio, &p12) == NULL) + err = MZ_SIGN_ERROR; + if (err == MZ_OK) + result 
= PKCS12_parse(p12, cert_pwd, &evp_pkey, &cert, &ca_stack); + if (result) { + cms = CMS_sign(NULL, NULL, ca_stack, NULL, CMS_BINARY | CMS_PARTIAL); + if (cms) + signer_info = CMS_add1_signer(cms, cert, evp_pkey, EVP_sha256(), 0); + if (signer_info == NULL) { + err = MZ_SIGN_ERROR; + } else { + message_bio = BIO_new_mem_buf(message, message_size); + signature_bio = BIO_new(BIO_s_mem()); + + result = CMS_final(cms, message_bio, NULL, CMS_BINARY); + if (result) + result = i2d_CMS_bio(signature_bio, cms); + if (result) { + BIO_flush(signature_bio); + BIO_get_mem_ptr(signature_bio, &buf_mem); + + *signature_size = buf_mem->length; + *signature = MZ_ALLOC(buf_mem->length); + + memcpy(*signature, buf_mem->data, buf_mem->length); + } +#if 0 + BIO *yy = BIO_new_file("xyz", "wb"); + BIO_write(yy, *signature, *signature_size); + BIO_flush(yy); + BIO_free(yy); +#endif + } + } + + if (!result) + err = MZ_SIGN_ERROR; + + if (cms) + CMS_ContentInfo_free(cms); + if (signature_bio) + BIO_free(signature_bio); + if (cert_bio) + BIO_free(cert_bio); + if (message_bio) + BIO_free(message_bio); + if (p12) + PKCS12_free(p12); + + if (err != MZ_OK && *signature != NULL) { + MZ_FREE(*signature); + *signature = NULL; + *signature_size = 0; + } + + return err; +} + +int32_t mz_crypt_sign_verify(uint8_t *message, int32_t message_size, uint8_t *signature, int32_t signature_size) { + CMS_ContentInfo *cms = NULL; + STACK_OF(X509) *signers = NULL; + STACK_OF(X509) *intercerts = NULL; + X509_STORE *cert_store = NULL; + X509_LOOKUP *lookup = NULL; + X509_STORE_CTX *store_ctx = NULL; + BIO *message_bio = NULL; + BIO *signature_bio = NULL; + BUF_MEM *buf_mem = NULL; + int32_t signer_count = 0; + int32_t result = 0; + int32_t i = 0; + int32_t err = MZ_SIGN_ERROR; + + + if (message == NULL || message_size == 0 || signature == NULL || signature_size == 0) + return MZ_PARAM_ERROR; + + mz_crypt_init(); + + cert_store = X509_STORE_new(); + + X509_STORE_load_locations(cert_store, "cacert.pem", NULL); + X509_STORE_set_default_paths(cert_store); + +#if 0 + BIO *yy = BIO_new_file("xyz", "wb"); + BIO_write(yy, signature, signature_size); + BIO_flush(yy); + BIO_free(yy); +#endif + + lookup = X509_STORE_add_lookup(cert_store, X509_LOOKUP_file()); + if (lookup != NULL) + X509_LOOKUP_load_file(lookup, "cacert.pem", X509_FILETYPE_PEM); + lookup = X509_STORE_add_lookup(cert_store, X509_LOOKUP_hash_dir()); + if (lookup != NULL) + X509_LOOKUP_add_dir(lookup, NULL, X509_FILETYPE_DEFAULT); + + signature_bio = BIO_new_mem_buf(signature, signature_size); + message_bio = BIO_new(BIO_s_mem()); + + cms = d2i_CMS_bio(signature_bio, NULL); + if (cms) { + result = CMS_verify(cms, NULL, cert_store, NULL, message_bio, CMS_NO_SIGNER_CERT_VERIFY | CMS_BINARY); + if (result) + signers = CMS_get0_signers(cms); + if (signers) + intercerts = CMS_get1_certs(cms); + if (intercerts) { + /* Verify signer certificates */ + signer_count = sk_X509_num(signers); + if (signer_count > 0) + err = MZ_OK; + + for (i = 0; i < signer_count; i++) { + store_ctx = X509_STORE_CTX_new(); + X509_STORE_CTX_init(store_ctx, cert_store, sk_X509_value(signers, i), intercerts); + result = X509_verify_cert(store_ctx); + if (store_ctx) + X509_STORE_CTX_free(store_ctx); + + if (!result) { + err = MZ_SIGN_ERROR; + break; + } + } + } + + BIO_get_mem_ptr(message_bio, &buf_mem); + + if (err == MZ_OK) { + /* Verify the message */ + if (((int32_t)buf_mem->length != message_size) || + (memcmp(buf_mem->data, message, message_size) != 0)) + err = MZ_SIGN_ERROR; + } + } + +#if 0 + if (!result) + 
printf(ERR_error_string(ERR_get_error(), NULL)); +#endif + + if (cms) + CMS_ContentInfo_free(cms); + if (message_bio) + BIO_free(message_bio); + if (signature_bio) + BIO_free(signature_bio); + if (cert_store) + X509_STORE_free(cert_store); + + return err; +} +#endif diff --git a/Externals/minizip/mz_crypt_win32.c b/Externals/minizip/mz_crypt_win32.c new file mode 100644 index 000000000000..6ec6a848d02c --- /dev/null +++ b/Externals/minizip/mz_crypt_win32.c @@ -0,0 +1,739 @@ +/* mz_crypt_win32.c -- Crypto/hash functions for Windows + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#include "mz.h" +#include "mz_os.h" +#include "mz_crypt.h" + +#include +#include + +/***************************************************************************/ + +int32_t mz_crypt_rand(uint8_t *buf, int32_t size) { + HCRYPTPROV provider; + int32_t result = 0; + + + result = CryptAcquireContext(&provider, NULL, NULL, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT | CRYPT_SILENT); + if (result) { + result = CryptGenRandom(provider, size, buf); + CryptReleaseContext(provider, 0); + if (result) + return size; + } + + return mz_os_rand(buf, size); +} + +/***************************************************************************/ + +typedef struct mz_crypt_sha_s { + HCRYPTPROV provider; + HCRYPTHASH hash; + int32_t error; + uint16_t algorithm; +} mz_crypt_sha; + +/***************************************************************************/ + +void mz_crypt_sha_reset(void *handle) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + if (sha->hash) + CryptDestroyHash(sha->hash); + sha->hash = 0; + if (sha->provider) + CryptReleaseContext(sha->provider, 0); + sha->provider = 0; + sha->error = 0; +} + +int32_t mz_crypt_sha_begin(void *handle) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + ALG_ID alg_id = 0; + int32_t result = 0; + int32_t err = MZ_OK; + + + if (sha == NULL) + return MZ_PARAM_ERROR; + + if (sha->algorithm == MZ_HASH_SHA1) + alg_id = CALG_SHA1; + else + alg_id = CALG_SHA_256; + + result = CryptAcquireContext(&sha->provider, NULL, NULL, PROV_RSA_AES, CRYPT_VERIFYCONTEXT | CRYPT_SILENT); + if (!result) { + sha->error = GetLastError(); + err = MZ_CRYPT_ERROR; + } + + if (result) { + result = CryptCreateHash(sha->provider, alg_id, 0, 0, &sha->hash); + if (!result) { + sha->error = GetLastError(); + err = MZ_HASH_ERROR; + } + } + + return err; +} + +int32_t mz_crypt_sha_update(void *handle, const void *buf, int32_t size) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + int32_t result = 0; + + if (sha == NULL || buf == NULL || sha->hash == 0) + return MZ_PARAM_ERROR; + result = CryptHashData(sha->hash, buf, size, 0); + if (!result) { + sha->error = GetLastError(); + return MZ_HASH_ERROR; + } + return size; +} + +int32_t mz_crypt_sha_end(void *handle, uint8_t *digest, int32_t digest_size) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + int32_t result = 0; + int32_t expected_size = 0; + + if (sha == NULL || digest == NULL || sha->hash == 0) + return MZ_PARAM_ERROR; + result = CryptGetHashParam(sha->hash, HP_HASHVAL, NULL, (DWORD *)&expected_size, 0); + if (expected_size > digest_size) + return MZ_BUF_ERROR; + if (!result) + return MZ_HASH_ERROR; + result = CryptGetHashParam(sha->hash, HP_HASHVAL, digest, (DWORD *)&digest_size, 0); + if (!result) { + sha->error = GetLastError(); + return MZ_HASH_ERROR; + 
} + return MZ_OK; +} + +void mz_crypt_sha_set_algorithm(void *handle, uint16_t algorithm) { + mz_crypt_sha *sha = (mz_crypt_sha *)handle; + sha->algorithm = algorithm; +} + +void *mz_crypt_sha_create(void **handle) { + mz_crypt_sha *sha = NULL; + + sha = (mz_crypt_sha *)MZ_ALLOC(sizeof(mz_crypt_sha)); + if (sha != NULL) { + memset(sha, 0, sizeof(mz_crypt_sha)); + sha->algorithm = MZ_HASH_SHA256; + } + if (handle != NULL) + *handle = sha; + + return sha; +} + +void mz_crypt_sha_delete(void **handle) { + mz_crypt_sha *sha = NULL; + if (handle == NULL) + return; + sha = (mz_crypt_sha *)*handle; + if (sha != NULL) { + mz_crypt_sha_reset(*handle); + MZ_FREE(sha); + } + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_crypt_aes_s { + HCRYPTPROV provider; + HCRYPTKEY key; + int32_t mode; + int32_t error; +} mz_crypt_aes; + +/***************************************************************************/ + +static void mz_crypt_aes_free(void *handle) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + if (aes->key) + CryptDestroyKey(aes->key); + aes->key = 0; + if (aes->provider) + CryptReleaseContext(aes->provider, 0); + aes->provider = 0; +} + +void mz_crypt_aes_reset(void *handle) { + mz_crypt_aes_free(handle); +} + +int32_t mz_crypt_aes_encrypt(void *handle, uint8_t *buf, int32_t size) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + int32_t result = 0; + + if (aes == NULL || buf == NULL) + return MZ_PARAM_ERROR; + if (size != MZ_AES_BLOCK_SIZE) + return MZ_PARAM_ERROR; + result = CryptEncrypt(aes->key, 0, 0, 0, buf, (DWORD *)&size, size); + if (!result) { + aes->error = GetLastError(); + return MZ_CRYPT_ERROR; + } + return size; +} + +int32_t mz_crypt_aes_decrypt(void *handle, uint8_t *buf, int32_t size) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + int32_t result = 0; + if (aes == NULL || buf == NULL) + return MZ_PARAM_ERROR; + if (size != MZ_AES_BLOCK_SIZE) + return MZ_PARAM_ERROR; + result = CryptDecrypt(aes->key, 0, 0, 0, buf, (DWORD *)&size); + if (!result) { + aes->error = GetLastError(); + return MZ_CRYPT_ERROR; + } + return size; +} + +static int32_t mz_crypt_aes_set_key(void *handle, const void *key, int32_t key_length) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + HCRYPTHASH hash = 0; + ALG_ID alg_id = 0; + typedef struct key_blob_header_s { + BLOBHEADER hdr; + uint32_t key_length; + } key_blob_header_s; + key_blob_header_s *key_blob_s = NULL; + uint32_t mode = CRYPT_MODE_ECB; + uint8_t *key_blob = NULL; + int32_t key_blob_size = 0; + int32_t result = 0; + int32_t err = MZ_OK; + + + if (aes == NULL || key == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_aes_reset(handle); + + if (key_length == MZ_AES_KEY_LENGTH(MZ_AES_ENCRYPTION_MODE_128)) + alg_id = CALG_AES_128; + else if (key_length == MZ_AES_KEY_LENGTH(MZ_AES_ENCRYPTION_MODE_192)) + alg_id = CALG_AES_192; + else if (key_length == MZ_AES_KEY_LENGTH(MZ_AES_ENCRYPTION_MODE_256)) + alg_id = CALG_AES_256; + else + return MZ_PARAM_ERROR; + + result = CryptAcquireContext(&aes->provider, NULL, MS_ENH_RSA_AES_PROV, PROV_RSA_AES, CRYPT_VERIFYCONTEXT | CRYPT_SILENT); + if (result) { + key_blob_size = sizeof(key_blob_header_s) + key_length; + key_blob = (uint8_t *)MZ_ALLOC(key_blob_size); + if (key_blob) { + key_blob_s = (key_blob_header_s *)key_blob; + key_blob_s->hdr.bType = PLAINTEXTKEYBLOB; + key_blob_s->hdr.bVersion = CUR_BLOB_VERSION; + key_blob_s->hdr.aiKeyAlg = alg_id; + key_blob_s->hdr.reserved = 0; + key_blob_s->key_length = key_length; + + memcpy(key_blob + 
sizeof(key_blob_header_s), key, key_length); + + result = CryptImportKey(aes->provider, key_blob, key_blob_size, 0, 0, &aes->key); + + SecureZeroMemory(key_blob, key_blob_size); + MZ_FREE(key_blob); + } else { + err = MZ_MEM_ERROR; + } + } + + if (result && err == MZ_OK) + result = CryptSetKeyParam(aes->key, KP_MODE, (const uint8_t *)&mode, 0); + + if (!result && err == MZ_OK) { + aes->error = GetLastError(); + err = MZ_CRYPT_ERROR; + } + + if (hash) + CryptDestroyHash(hash); + + return err; +} + +int32_t mz_crypt_aes_set_encrypt_key(void *handle, const void *key, int32_t key_length) { + return mz_crypt_aes_set_key(handle, key, key_length); +} + +int32_t mz_crypt_aes_set_decrypt_key(void *handle, const void *key, int32_t key_length) { + return mz_crypt_aes_set_key(handle, key, key_length); +} + +void mz_crypt_aes_set_mode(void *handle, int32_t mode) { + mz_crypt_aes *aes = (mz_crypt_aes *)handle; + aes->mode = mode; +} + +void *mz_crypt_aes_create(void **handle) { + mz_crypt_aes *aes = NULL; + + aes = (mz_crypt_aes *)MZ_ALLOC(sizeof(mz_crypt_aes)); + if (aes != NULL) + memset(aes, 0, sizeof(mz_crypt_aes)); + if (handle != NULL) + *handle = aes; + + return aes; +} + +void mz_crypt_aes_delete(void **handle) { + mz_crypt_aes *aes = NULL; + if (handle == NULL) + return; + aes = (mz_crypt_aes *)*handle; + if (aes != NULL) { + mz_crypt_aes_free(*handle); + MZ_FREE(aes); + } + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_crypt_hmac_s { + HCRYPTPROV provider; + HCRYPTHASH hash; + HCRYPTKEY key; + HMAC_INFO info; + int32_t mode; + int32_t error; + uint16_t algorithm; +} mz_crypt_hmac; + +/***************************************************************************/ + +static void mz_crypt_hmac_free(void *handle) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + if (hmac->key) + CryptDestroyKey(hmac->key); + hmac->key = 0; + if (hmac->hash) + CryptDestroyHash(hmac->hash); + hmac->hash = 0; + if (hmac->provider) + CryptReleaseContext(hmac->provider, 0); + hmac->provider = 0; + memset(&hmac->info, 0, sizeof(hmac->info)); +} + +void mz_crypt_hmac_reset(void *handle) { + mz_crypt_hmac_free(handle); +} + +int32_t mz_crypt_hmac_init(void *handle, const void *key, int32_t key_length) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + ALG_ID alg_id = 0; + typedef struct key_blob_header_s { + BLOBHEADER hdr; + uint32_t key_length; + } key_blob_header_s; + key_blob_header_s *key_blob_s = NULL; + uint8_t *key_blob = NULL; + int32_t key_blob_size = 0; + int32_t result = 0; + int32_t err = MZ_OK; + + + if (hmac == NULL || key == NULL) + return MZ_PARAM_ERROR; + + mz_crypt_hmac_reset(handle); + + if (hmac->algorithm == MZ_HASH_SHA1) + alg_id = CALG_SHA1; + else + alg_id = CALG_SHA_256; + + hmac->info.HashAlgid = alg_id; + + result = CryptAcquireContext(&hmac->provider, NULL, MS_ENHANCED_PROV, PROV_RSA_FULL, + CRYPT_VERIFYCONTEXT | CRYPT_SILENT); + + if (!result) { + hmac->error = GetLastError(); + err = MZ_CRYPT_ERROR; + } else { + /* Zero-pad odd key lengths */ + if (key_length % 2 == 1) + key_length += 1; + key_blob_size = sizeof(key_blob_header_s) + key_length; + key_blob = (uint8_t *)MZ_ALLOC(key_blob_size); + } + + if (key_blob) { + memset(key_blob, 0, key_blob_size); + key_blob_s = (key_blob_header_s *)key_blob; + key_blob_s->hdr.bType = PLAINTEXTKEYBLOB; + key_blob_s->hdr.bVersion = CUR_BLOB_VERSION; + key_blob_s->hdr.aiKeyAlg = CALG_RC2; + key_blob_s->hdr.reserved = 0; + key_blob_s->key_length = key_length; + + memcpy(key_blob 
+ sizeof(key_blob_header_s), key, key_length); + + result = CryptImportKey(hmac->provider, key_blob, key_blob_size, 0, CRYPT_IPSEC_HMAC_KEY, &hmac->key); + if (result) + result = CryptCreateHash(hmac->provider, CALG_HMAC, hmac->key, 0, &hmac->hash); + if (result) + result = CryptSetHashParam(hmac->hash, HP_HMAC_INFO, (uint8_t *)&hmac->info, 0); + + SecureZeroMemory(key_blob, key_blob_size); + MZ_FREE(key_blob); + } else if (err == MZ_OK) { + err = MZ_MEM_ERROR; + } + + if (!result) { + hmac->error = GetLastError(); + err = MZ_CRYPT_ERROR; + } + + if (err != MZ_OK) + mz_crypt_hmac_free(handle); + + return err; +} + +int32_t mz_crypt_hmac_update(void *handle, const void *buf, int32_t size) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + int32_t result = 0; + + if (hmac == NULL || buf == NULL || hmac->hash == 0) + return MZ_PARAM_ERROR; + + result = CryptHashData(hmac->hash, buf, size, 0); + if (!result) { + hmac->error = GetLastError(); + return MZ_HASH_ERROR; + } + return MZ_OK; +} + +int32_t mz_crypt_hmac_end(void *handle, uint8_t *digest, int32_t digest_size) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + int32_t result = 0; + int32_t expected_size = 0; + + if (hmac == NULL || digest == NULL || hmac->hash == 0) + return MZ_PARAM_ERROR; + result = CryptGetHashParam(hmac->hash, HP_HASHVAL, NULL, (DWORD *)&expected_size, 0); + if (expected_size > digest_size) + return MZ_BUF_ERROR; + if (!result) + return MZ_HASH_ERROR; + result = CryptGetHashParam(hmac->hash, HP_HASHVAL, digest, (DWORD *)&digest_size, 0); + if (!result) { + hmac->error = GetLastError(); + return MZ_HASH_ERROR; + } + return MZ_OK; +} + +void mz_crypt_hmac_set_algorithm(void *handle, uint16_t algorithm) { + mz_crypt_hmac *hmac = (mz_crypt_hmac *)handle; + hmac->algorithm = algorithm; +} + +int32_t mz_crypt_hmac_copy(void *src_handle, void *target_handle) { + mz_crypt_hmac *source = (mz_crypt_hmac *)src_handle; + mz_crypt_hmac *target = (mz_crypt_hmac *)target_handle; + int32_t result = 0; + int32_t err = MZ_OK; + + if (target->hash) { + CryptDestroyHash(target->hash); + target->hash = 0; + } + + result = CryptDuplicateHash(source->hash, NULL, 0, &target->hash); + + if (!result) { + target->error = GetLastError(); + err = MZ_HASH_ERROR; + } + return err; +} + +void *mz_crypt_hmac_create(void **handle) { + mz_crypt_hmac *hmac = NULL; + + hmac = (mz_crypt_hmac *)MZ_ALLOC(sizeof(mz_crypt_hmac)); + if (hmac != NULL) { + memset(hmac, 0, sizeof(mz_crypt_hmac)); + hmac->algorithm = MZ_HASH_SHA256; + } + if (handle != NULL) + *handle = hmac; + + return hmac; +} + +void mz_crypt_hmac_delete(void **handle) { + mz_crypt_hmac *hmac = NULL; + if (handle == NULL) + return; + hmac = (mz_crypt_hmac *)*handle; + if (hmac != NULL) { + mz_crypt_hmac_free(*handle); + MZ_FREE(hmac); + } + *handle = NULL; +} + +/***************************************************************************/ + +#if defined(MZ_ZIP_SIGNING) +int32_t mz_crypt_sign(uint8_t *message, int32_t message_size, uint8_t *cert_data, int32_t cert_data_size, + const char *cert_pwd, uint8_t **signature, int32_t *signature_size) { + CRYPT_SIGN_MESSAGE_PARA sign_params; + CRYPT_DATA_BLOB cert_data_blob; + PCCERT_CONTEXT cert_context = NULL; + HCERTSTORE cert_store = 0; + wchar_t *password_wide = NULL; + int32_t result = 0; + int32_t err = MZ_OK; + uint32_t messages_sizes[1]; + uint8_t *messages[1]; + + + if (message == NULL || cert_data == NULL || signature == NULL || signature_size == NULL) + return MZ_PARAM_ERROR; + + *signature = NULL; + *signature_size = 0; + + 
cert_data_blob.pbData = cert_data;
+    cert_data_blob.cbData = cert_data_size;
+
+    password_wide = mz_os_unicode_string_create(cert_pwd, MZ_ENCODING_UTF8);
+    if (password_wide) {
+        cert_store = PFXImportCertStore(&cert_data_blob, password_wide, 0);
+        mz_os_unicode_string_delete(&password_wide);
+    }
+
+    if (cert_store == NULL)
+        cert_store = PFXImportCertStore(&cert_data_blob, L"", 0);
+    if (cert_store == NULL)
+        cert_store = PFXImportCertStore(&cert_data_blob, NULL, 0);
+    if (cert_store == NULL)
+        return MZ_PARAM_ERROR;
+
+    if (err == MZ_OK) {
+        cert_context = CertFindCertificateInStore(cert_store,
+            X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, 0, CERT_FIND_HAS_PRIVATE_KEY, NULL, NULL);
+        if (cert_context == NULL)
+            err = MZ_PARAM_ERROR;
+    }
+    if (err == MZ_OK) {
+        memset(&sign_params, 0, sizeof(sign_params));
+
+        sign_params.cbSize = sizeof(sign_params);
+        sign_params.dwMsgEncodingType = PKCS_7_ASN_ENCODING | X509_ASN_ENCODING;
+        sign_params.pSigningCert = cert_context;
+        sign_params.HashAlgorithm.pszObjId = szOID_NIST_sha256;
+        sign_params.cMsgCert = 1;
+        sign_params.rgpMsgCert = &cert_context;
+
+        messages[0] = message;
+        messages_sizes[0] = message_size;
+
+#if 0 /* Timestamp support */
+        CRYPT_ATTR_BLOB crypt_blob;
+        CRYPT_TIMESTAMP_CONTEXT *ts_context = NULL;
+        CRYPT_ATTRIBUTE unauth_attribs[1];
+        wchar_t *timestamp_url_wide = NULL;
+        const char *timestamp_url = NULL;
+
+        if (timestamp_url != NULL)
+            timestamp_url_wide = mz_os_unicode_string_create(timestamp_url);
+        if (timestamp_url_wide != NULL) {
+            result = CryptRetrieveTimeStamp(timestamp_url_wide,
+                TIMESTAMP_NO_AUTH_RETRIEVAL | TIMESTAMP_VERIFY_CONTEXT_SIGNATURE, 0, szOID_NIST_sha256,
+                NULL, message, message_size, &ts_context, NULL, NULL);
+
+            mz_os_unicode_string_delete(&timestamp_url_wide);
+
+            if ((result) && (ts_context != NULL)) {
+                crypt_blob.cbData = ts_context->cbEncoded;
+                crypt_blob.pbData = ts_context->pbEncoded;
+
+                unauth_attribs[0].pszObjId = "1.2.840.113549.1.9.16.2.14"; //id-smime-aa-timeStampToken
+                unauth_attribs[0].cValue = 1;
+                unauth_attribs[0].rgValue = &crypt_blob;
+
+                sign_params.rgUnauthAttr = &unauth_attribs[0];
+                sign_params.cUnauthAttr = 1;
+            }
+        }
+
+        if (ts_context != NULL)
+            CryptMemFree(ts_context);
+
+        if (result)
+#endif
+
+        result = CryptSignMessage(&sign_params, FALSE, 1, (const BYTE **)messages, (DWORD *)messages_sizes,
+            NULL, (DWORD *)signature_size);
+
+        if (result && *signature_size > 0)
+            *signature = (uint8_t *)MZ_ALLOC(*signature_size);
+
+        if (result && *signature != NULL)
+            result = CryptSignMessage(&sign_params, FALSE, 1, (const BYTE **)messages, (DWORD *)messages_sizes,
+                *signature, (DWORD *)signature_size);
+
+        if (!result)
+            err = MZ_SIGN_ERROR;
+    }
+
+    if (cert_context != NULL)
+        CertFreeCertificateContext(cert_context);
+    if (cert_store != NULL)
+        CertCloseStore(cert_store, 0);
+
+    return err;
+}
+
+int32_t mz_crypt_sign_verify(uint8_t *message, int32_t message_size, uint8_t *signature, int32_t signature_size) {
+    CRYPT_VERIFY_MESSAGE_PARA verify_params;
+    CERT_CONTEXT *signer_cert = NULL;
+    CERT_CHAIN_PARA chain_para;
+    CERT_CHAIN_CONTEXT *chain_context = NULL;
+    CERT_CHAIN_POLICY_PARA chain_policy;
+    CERT_CHAIN_POLICY_STATUS chain_policy_status;
+    HCRYPTMSG crypt_msg = 0;
+    int32_t result = 0;
+    int32_t err = MZ_SIGN_ERROR;
+    uint8_t *decoded = NULL;
+    int32_t decoded_size = 0;
+
+
+    memset(&verify_params, 0, sizeof(verify_params));
+
+    verify_params.cbSize = sizeof(verify_params);
+    verify_params.dwMsgAndCertEncodingType = PKCS_7_ASN_ENCODING | X509_ASN_ENCODING;
+
+    result =
CryptVerifyMessageSignature(&verify_params, 0, signature, signature_size, + NULL, (DWORD *)&decoded_size, NULL); + + if (result && decoded_size > 0) + decoded = (uint8_t *)MZ_ALLOC(decoded_size); + + if (result && decoded != NULL) + result = CryptVerifyMessageSignature(&verify_params, 0, signature, signature_size, + decoded, (DWORD *)&decoded_size, (const CERT_CONTEXT **)&signer_cert); + + /* Get and validate certificate chain */ + memset(&chain_para, 0, sizeof(chain_para)); + + if (result && signer_cert != NULL) + result = CertGetCertificateChain(NULL, signer_cert, NULL, NULL, &chain_para, + CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT, NULL, (const CERT_CHAIN_CONTEXT **)&chain_context); + + memset(&chain_policy, 0, sizeof(chain_policy)); + chain_policy.cbSize = sizeof(CERT_CHAIN_POLICY_PARA); + + memset(&chain_policy_status, 0, sizeof(chain_policy_status)); + chain_policy_status.cbSize = sizeof(CERT_CHAIN_POLICY_STATUS); + + if (result && chain_context != NULL) + result = CertVerifyCertificateChainPolicy(CERT_CHAIN_POLICY_BASE, chain_context, + &chain_policy, &chain_policy_status); + + if (chain_policy_status.dwError != S_OK) + result = 0; + +#if 0 + crypt_msg = CryptMsgOpenToDecode(PKCS_7_ASN_ENCODING | X509_ASN_ENCODING, 0, 0, 0, NULL, NULL); + if (crypt_msg != NULL) { + /* Timestamp support */ + PCRYPT_ATTRIBUTES unauth_attribs = NULL; + HCRYPTMSG ts_msg = 0; + uint8_t *ts_content = NULL; + int32_t ts_content_size = 0; + uint8_t *ts_signature = NULL; + int32_t ts_signature_size = 0; + + result = CryptMsgUpdate(crypt_msg, signature, signature_size, 1); + + if (result) + CryptMsgGetParam(crypt_msg, CMSG_SIGNER_UNAUTH_ATTR_PARAM, 0, NULL, &ts_signature_size); + + if ((result) && (ts_signature_size > 0)) + ts_signature = (uint8_t *)MZ_ALLOC(ts_signature_size); + + if ((result) && (ts_signature != NULL)) { + result = CryptMsgGetParam(crypt_msg, CMSG_SIGNER_UNAUTH_ATTR_PARAM, 0, ts_signature, + &ts_signature_size); + if (result) + { + unauth_attribs = (PCRYPT_ATTRIBUTES)ts_signature; + + if ((unauth_attribs->cAttr > 0) && (unauth_attribs->rgAttr[0].cValue > 0)) + { + ts_content = unauth_attribs->rgAttr[0].rgValue->pbData; + ts_content_size = unauth_attribs->rgAttr[0].rgValue->cbData; + } + } + + if ((result) && (ts_content != NULL)) + result = CryptVerifyTimeStampSignature(ts_content, ts_content_size, decoded, + decoded_size, 0, &crypt_context, NULL, NULL); + + if (result) + err = MZ_OK; + } + + if (ts_signature != NULL) + MZ_FREE(ts_signature); + + if (crypt_context != NULL) + CryptMemFree(crypt_context); + } else { + result = 0; + } +#endif + + if ((result) && (decoded != NULL) && (decoded_size == message_size)) { + /* Verify cms message with our stored message */ + if (memcmp(decoded, message, message_size) == 0) + err = MZ_OK; + } + + if (chain_context != NULL) + CertFreeCertificateChain(chain_context); + if (signer_cert != NULL) + CertFreeCertificateContext(signer_cert); + if (crypt_msg != NULL) + CryptMsgClose(crypt_msg); + + if (decoded != NULL) + MZ_FREE(decoded); + + return err; +} +#endif diff --git a/Externals/minizip/mz_os.c b/Externals/minizip/mz_os.c new file mode 100644 index 000000000000..f96befe64c1b --- /dev/null +++ b/Externals/minizip/mz_os.c @@ -0,0 +1,354 @@ +/* mz_os.c -- System functions + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2010 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as 
zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#include "mz.h" +#include "mz_crypt.h" +#include "mz_os.h" +#include "mz_strm.h" +#include "mz_strm_os.h" + +#include /* tolower */ + +/***************************************************************************/ + +int32_t mz_path_combine(char *path, const char *join, int32_t max_path) { + int32_t path_len = 0; + + if (path == NULL || join == NULL || max_path == 0) + return MZ_PARAM_ERROR; + + path_len = (int32_t)strlen(path); + + if (path_len == 0) { + strncpy(path, join, max_path - 1); + path[max_path - 1] = 0; + } else { + mz_path_append_slash(path, max_path, MZ_PATH_SLASH_PLATFORM); + strncat(path, join, max_path - path_len); + } + + return MZ_OK; +} + +int32_t mz_path_append_slash(char *path, int32_t max_path, char slash) { + int32_t path_len = (int32_t)strlen(path); + if ((path_len + 2) >= max_path) + return MZ_BUF_ERROR; + if (path[path_len - 1] != '\\' && path[path_len - 1] != '/') { + path[path_len] = slash; + path[path_len + 1] = 0; + } + return MZ_OK; +} + +int32_t mz_path_remove_slash(char *path) { + int32_t path_len = (int32_t)strlen(path); + while (path_len > 0) { + if (path[path_len - 1] == '\\' || path[path_len - 1] == '/') + path[path_len - 1] = 0; + else + break; + + path_len -= 1; + } + return MZ_OK; +} + +int32_t mz_path_has_slash(const char *path) { + int32_t path_len = (int32_t)strlen(path); + if (path[path_len - 1] != '\\' && path[path_len - 1] != '/') + return MZ_EXIST_ERROR; + return MZ_OK; +} + +int32_t mz_path_convert_slashes(char *path, char slash) { + int32_t i = 0; + + for (i = 0; i < (int32_t)strlen(path); i += 1) { + if (path[i] == '\\' || path[i] == '/') + path[i] = slash; + } + return MZ_OK; +} + +int32_t mz_path_compare_wc(const char *path, const char *wildcard, uint8_t ignore_case) { + while (*path != 0) { + switch (*wildcard) { + case '*': + + if (*(wildcard + 1) == 0) + return MZ_OK; + + while (*path != 0) { + if (mz_path_compare_wc(path, (wildcard + 1), ignore_case) == MZ_OK) + return MZ_OK; + + path += 1; + } + + return MZ_EXIST_ERROR; + + default: + /* Ignore differences in path slashes on platforms */ + if ((*path == '\\' && *wildcard == '/') || (*path == '/' && *wildcard == '\\')) + break; + + if (ignore_case) { + if (tolower(*path) != tolower(*wildcard)) + return MZ_EXIST_ERROR; + } else { + if (*path != *wildcard) + return MZ_EXIST_ERROR; + } + + break; + } + + path += 1; + wildcard += 1; + } + + if ((*wildcard != 0) && (*wildcard != '*')) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int32_t mz_path_resolve(const char *path, char *output, int32_t max_output) { + const char *source = path; + const char *check = output; + char *target = output; + + + if (max_output <= 0) + return MZ_PARAM_ERROR; + + while (*source != 0 && max_output > 1) { + check = source; + if ((*check == '\\') || (*check == '/')) + check += 1; + + if ((source == path) || (target == output) || (check != source)) { + /* Skip double paths */ + if ((*check == '\\') || (*check == '/')) { + source += 1; + continue; + } + if (*check == '.') { + check += 1; + + /* Remove . if at end of string and not at the beginning */ + if ((*check == 0) && (source != path && target != output)) { + /* Copy last slash */ + *target = *source; + target += 1; + max_output -= 1; + source += (check - source); + continue; + } + /* Remove . 
if not at end of string */ + else if ((*check == '\\') || (*check == '/')) { + source += (check - source); + /* Skip slash if at beginning of string */ + if (target == output && *source != 0) + source += 1; + continue; + } + /* Go to parent directory .. */ + else if (*check == '.') { + check += 1; + if ((*check == 0) || (*check == '\\' || *check == '/')) { + source += (check - source); + + /* Search backwards for previous slash */ + if (target != output) { + target -= 1; + do { + if ((*target == '\\') || (*target == '/')) + break; + + target -= 1; + max_output += 1; + } while (target > output); + } + + if ((target == output) && (*source != 0)) + source += 1; + if ((*target == '\\' || *target == '/') && (*source == 0)) + target += 1; + + *target = 0; + continue; + } + } + } + } + + *target = *source; + + source += 1; + target += 1; + max_output -= 1; + } + + *target = 0; + + if (*path == 0) + return MZ_INTERNAL_ERROR; + + return MZ_OK; +} + +int32_t mz_path_remove_filename(char *path) { + char *path_ptr = NULL; + + if (path == NULL) + return MZ_PARAM_ERROR; + + path_ptr = path + strlen(path) - 1; + + while (path_ptr > path) { + if ((*path_ptr == '/') || (*path_ptr == '\\')) { + *path_ptr = 0; + break; + } + + path_ptr -= 1; + } + + if (path_ptr == path) + *path_ptr = 0; + + return MZ_OK; +} + +int32_t mz_path_remove_extension(char *path) { + char *path_ptr = NULL; + + if (path == NULL) + return MZ_PARAM_ERROR; + + path_ptr = path + strlen(path) - 1; + + while (path_ptr > path) { + if ((*path_ptr == '/') || (*path_ptr == '\\')) + break; + if (*path_ptr == '.') { + *path_ptr = 0; + break; + } + + path_ptr -= 1; + } + + if (path_ptr == path) + *path_ptr = 0; + + return MZ_OK; +} + +int32_t mz_path_get_filename(const char *path, const char **filename) { + const char *match = NULL; + + if (path == NULL || filename == NULL) + return MZ_PARAM_ERROR; + + *filename = NULL; + + for (match = path; *match != 0; match += 1) { + if ((*match == '\\') || (*match == '/')) + *filename = match + 1; + } + + if (*filename == NULL) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int32_t mz_dir_make(const char *path) { + int32_t err = MZ_OK; + int16_t len = 0; + char *current_dir = NULL; + char *match = NULL; + char hold = 0; + + + len = (int16_t)strlen(path); + if (len <= 0) + return 0; + + current_dir = (char *)MZ_ALLOC((uint16_t)len + 1); + if (current_dir == NULL) + return MZ_MEM_ERROR; + + strcpy(current_dir, path); + mz_path_remove_slash(current_dir); + + err = mz_os_make_dir(current_dir); + if (err != MZ_OK) { + match = current_dir + 1; + while (1) { + while (*match != 0 && *match != '\\' && *match != '/') + match += 1; + hold = *match; + *match = 0; + + err = mz_os_make_dir(current_dir); + if (err != MZ_OK) + break; + if (hold == 0) + break; + + *match = hold; + match += 1; + } + } + + MZ_FREE(current_dir); + return err; +} + +int32_t mz_file_get_crc(const char *path, uint32_t *result_crc) { + void *stream = NULL; + uint32_t crc32 = 0; + int32_t read = 0; + int32_t err = MZ_OK; + uint8_t buf[16384]; + + mz_stream_os_create(&stream); + + err = mz_stream_os_open(stream, path, MZ_OPEN_MODE_READ); + + if (err == MZ_OK) { + do { + read = mz_stream_os_read(stream, buf, sizeof(buf)); + + if (read < 0) { + err = read; + break; + } + + crc32 = mz_crypt_crc32_update(crc32, buf, read); + } while ((err == MZ_OK) && (read > 0)); + + mz_stream_os_close(stream); + } + + *result_crc = crc32; + + mz_stream_os_delete(&stream); + + return err; +} + 
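For orientation, the path, directory, and CRC helpers defined above can be driven directly from plain C. The following is a minimal usage sketch, not part of the imported sources; it assumes the minizip-ng headers are on the include path, and the "output" directory name and "example.txt" file name are hypothetical.

#include <stdio.h>

#include "mz.h"
#include "mz_os.h"

int main(void) {
    char path[256] = "output";   /* hypothetical base directory */
    uint32_t crc = 0;

    /* Append "reports" using the platform slash, then create the tree recursively. */
    if (mz_path_combine(path, "reports", (int32_t)sizeof(path)) != MZ_OK ||
        mz_dir_make(path) != MZ_OK) {
        printf("could not create %s\n", path);
        return 1;
    }

    /* mz_file_get_crc streams an existing file through mz_crypt_crc32_update. */
    if (mz_file_get_crc("example.txt", &crc) == MZ_OK)
        printf("crc32 of example.txt: 0x%08x\n", (unsigned)crc);

    return 0;
}

Because mz_dir_make walks the path and creates each missing component in turn, the sketch does not need to pre-create the base directory.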
+/***************************************************************************/ diff --git a/Externals/minizip/mz_os.h b/Externals/minizip/mz_os.h new file mode 100644 index 000000000000..b3e2a58c1966 --- /dev/null +++ b/Externals/minizip/mz_os.h @@ -0,0 +1,175 @@ +/* mz_os.h -- System functions + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_OS_H +#define MZ_OS_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +#if defined(__APPLE__) +# define MZ_VERSION_MADEBY_HOST_SYSTEM (MZ_HOST_SYSTEM_OSX_DARWIN) +#elif defined(__riscos__) +# define MZ_VERSION_MADEBY_HOST_SYSTEM (MZ_HOST_SYSTEM_RISCOS) +#elif defined(_WIN32) +# define MZ_VERSION_MADEBY_HOST_SYSTEM (MZ_HOST_SYSTEM_WINDOWS_NTFS) +#else +# define MZ_VERSION_MADEBY_HOST_SYSTEM (MZ_HOST_SYSTEM_UNIX) +#endif + +#if defined(HAVE_LZMA) || defined(HAVE_LIBCOMP) +# define MZ_VERSION_MADEBY_ZIP_VERSION (63) +#elif defined(HAVE_WZAES) +# define MZ_VERSION_MADEBY_ZIP_VERSION (51) +#elif defined(HAVE_BZIP2) +# define MZ_VERSION_MADEBY_ZIP_VERSION (46) +#else +# define MZ_VERSION_MADEBY_ZIP_VERSION (45) +#endif + +#define MZ_VERSION_MADEBY ((MZ_VERSION_MADEBY_HOST_SYSTEM << 8) | \ + (MZ_VERSION_MADEBY_ZIP_VERSION)) + +#define MZ_PATH_SLASH_UNIX ('/') +#if defined(_WIN32) +# define MZ_PATH_SLASH_PLATFORM ('\\') +#else +# define MZ_PATH_SLASH_PLATFORM (MZ_PATH_SLASH_UNIX) +#endif + +/***************************************************************************/ + +#if defined(_WIN32) +struct dirent { + char d_name[256]; +}; +typedef void* DIR; +#else +#include +#endif + +/***************************************************************************/ +/* Shared functions */ + +int32_t mz_path_combine(char *path, const char *join, int32_t max_path); +/* Combines two paths */ + +int32_t mz_path_append_slash(char *path, int32_t max_path, char slash); +/* Appends a path slash on to the end of the path */ + +int32_t mz_path_remove_slash(char *path); +/* Removes a path slash from the end of the path */ + +int32_t mz_path_has_slash(const char *path); +/* Returns whether or not the path ends with slash */ + +int32_t mz_path_convert_slashes(char *path, char slash); +/* Converts the slashes in a path */ + +int32_t mz_path_compare_wc(const char *path, const char *wildcard, uint8_t ignore_case); +/* Compare two paths with wildcard */ + +int32_t mz_path_resolve(const char *path, char *target, int32_t max_target); +/* Resolves path */ + +int32_t mz_path_remove_filename(char *path); +/* Remove the filename from a path */ + +int32_t mz_path_remove_extension(char *path); +/* Remove the extension from a path */ + +int32_t mz_path_get_filename(const char *path, const char **filename); +/* Get the filename from a path */ + +int32_t mz_dir_make(const char *path); +/* Creates a directory recursively */ + +int32_t mz_file_get_crc(const char *path, uint32_t *result_crc); +/* Gets the crc32 hash of a file */ + +/***************************************************************************/ +/* Platform specific functions */ + +wchar_t *mz_os_unicode_string_create(const char *string, int32_t encoding); +/* Create unicode string from a utf8 string */ + +void mz_os_unicode_string_delete(wchar_t **string); +/* Delete a unicode string that was created */ + +uint8_t 
*mz_os_utf8_string_create(const char *string, int32_t encoding); +/* Create a utf8 string from a string with another encoding */ + +void mz_os_utf8_string_delete(uint8_t **string); +/* Delete a utf8 string that was created */ + +int32_t mz_os_rand(uint8_t *buf, int32_t size); +/* Random number generator (not cryptographically secure) */ + +int32_t mz_os_rename(const char *source_path, const char *target_path); +/* Rename a file */ + +int32_t mz_os_unlink(const char *path); +/* Delete an existing file */ + +int32_t mz_os_file_exists(const char *path); +/* Check to see if a file exists */ + +int64_t mz_os_get_file_size(const char *path); +/* Gets the length of a file */ + +int32_t mz_os_get_file_date(const char *path, time_t *modified_date, time_t *accessed_date, time_t *creation_date); +/* Gets a file's modified, access, and creation dates if supported */ + +int32_t mz_os_set_file_date(const char *path, time_t modified_date, time_t accessed_date, time_t creation_date); +/* Sets a file's modified, access, and creation dates if supported */ + +int32_t mz_os_get_file_attribs(const char *path, uint32_t *attributes); +/* Gets a file's attributes */ + +int32_t mz_os_set_file_attribs(const char *path, uint32_t attributes); +/* Sets a file's attributes */ + +int32_t mz_os_make_dir(const char *path); +/* Recursively creates a directory */ + +DIR* mz_os_open_dir(const char *path); +/* Opens a directory for listing */ +struct +dirent* mz_os_read_dir(DIR *dir); +/* Reads a directory listing entry */ + +int32_t mz_os_close_dir(DIR *dir); +/* Closes a directory that has been opened for listing */ + +int32_t mz_os_is_dir(const char *path); +/* Checks to see if path is a directory */ + +int32_t mz_os_is_symlink(const char *path); +/* Checks to see if path is a symbolic link */ + +int32_t mz_os_make_symlink(const char *path, const char *target_path); +/* Creates a symbolic link pointing to a target */ + +int32_t mz_os_read_symlink(const char *path, char *target_path, int32_t max_target_path); +/* Gets the target path for a symbolic link */ + +uint64_t mz_os_ms_time(void); +/* Gets the time in milliseconds */ + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_os_posix.c b/Externals/minizip/mz_os_posix.c new file mode 100644 index 000000000000..576943d16206 --- /dev/null +++ b/Externals/minizip/mz_os_posix.c @@ -0,0 +1,367 @@ +/* mz_os_posix.c -- System functions for posix + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/
+
+#include "mz.h"
+#include "mz_strm.h"
+#include "mz_os.h"
+
+#include <stdio.h> /* rename */
+#include <errno.h>
+#if defined(HAVE_ICONV)
+#include <iconv.h>
+#endif
+
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#ifndef _WIN32
+# include <unistd.h>
+# include <utime.h>
+#endif
+#if defined(__APPLE__)
+# include <mach/clock.h>
+# include <mach/mach.h>
+#endif
+
+#if defined(HAVE_GETRANDOM)
+# include <sys/random.h>
+#endif
+#if defined(HAVE_LIBBSD)
+# include <sys/types.h>
+# ifndef __u_char_defined
+   typedef unsigned char u_char;
+# endif
+# include <bsd/stdlib.h> /* arc4random_buf */
+#endif
+
+/***************************************************************************/
+
+#if defined(HAVE_ICONV)
+uint8_t *mz_os_utf8_string_create(const char *string, int32_t encoding) {
+    iconv_t cd;
+    const char *from_encoding = NULL;
+    size_t result = 0;
+    size_t string_length = 0;
+    size_t string_utf8_size = 0;
+    uint8_t *string_utf8 = NULL;
+    uint8_t *string_utf8_ptr = NULL;
+
+    if (string == NULL)
+        return NULL;
+
+    if (encoding == MZ_ENCODING_CODEPAGE_437)
+        from_encoding = "CP437";
+    else if (encoding == MZ_ENCODING_CODEPAGE_932)
+        from_encoding = "CP932";
+    else if (encoding == MZ_ENCODING_CODEPAGE_936)
+        from_encoding = "CP936";
+    else if (encoding == MZ_ENCODING_CODEPAGE_950)
+        from_encoding = "CP950";
+    else if (encoding == MZ_ENCODING_UTF8)
+        from_encoding = "UTF-8";
+    else
+        return NULL;
+
+    cd = iconv_open("UTF-8", from_encoding);
+    if (cd == (iconv_t)-1)
+        return NULL;
+
+    string_length = strlen(string);
+    string_utf8_size = string_length * 2;
+    string_utf8 = (uint8_t *)MZ_ALLOC((int32_t)(string_utf8_size + 1));
+    string_utf8_ptr = string_utf8;
+
+    if (string_utf8) {
+        memset(string_utf8, 0, string_utf8_size + 1);
+
+        result = iconv(cd, (char **)&string, &string_length,
+            (char **)&string_utf8_ptr, &string_utf8_size);
+    }
+
+    iconv_close(cd);
+
+    if (result == (size_t)-1) {
+        MZ_FREE(string_utf8);
+        string_utf8 = NULL;
+    }
+
+    return string_utf8;
+}
+#else
+uint8_t *mz_os_utf8_string_create(const char *string, int32_t encoding) {
+    size_t string_length = 0;
+    uint8_t *string_copy = NULL;
+
+    string_length = strlen(string);
+    string_copy = (uint8_t *)MZ_ALLOC((int32_t)(string_length + 1));
+    strncpy((char *)string_copy, string, string_length);
+    string_copy[string_length] = 0;
+
+    return string_copy;
+}
+#endif
+
+void mz_os_utf8_string_delete(uint8_t **string) {
+    if (string != NULL) {
+        MZ_FREE(*string);
+        *string = NULL;
+    }
+}
+
+/***************************************************************************/
+
+#if defined(HAVE_ARC4RANDOM_BUF)
+int32_t mz_os_rand(uint8_t *buf, int32_t size) {
+    if (size < 0)
+        return 0;
+    arc4random_buf(buf, (uint32_t)size);
+    return size;
+}
+#elif defined(HAVE_ARC4RANDOM)
+int32_t mz_os_rand(uint8_t *buf, int32_t size) {
+    int32_t left = size;
+    for (; left > 2; left -= 3, buf += 3) {
+        uint32_t val = arc4random();
+
+        buf[0] = (val) & 0xFF;
+        buf[1] = (val >> 8) & 0xFF;
+        buf[2] = (val >> 16) & 0xFF;
+    }
+    for (; left > 0; left--, buf++) {
+        *buf = arc4random() & 0xFF;
+    }
+    return size - left;
+}
+#elif defined(HAVE_GETRANDOM)
+int32_t mz_os_rand(uint8_t *buf, int32_t size) {
+    int32_t left = size;
+    int32_t written = 0;
+
+    while (left > 0) {
+        written = getrandom(buf, left, 0);
+        if (written < 0)
+            return MZ_INTERNAL_ERROR;
+
+        buf += written;
+        left -= written;
+    }
+    return size - left;
+}
+#else
+int32_t mz_os_rand(uint8_t *buf, int32_t size) {
+    static unsigned calls = 0;
+    int32_t i = 0;
+
+    /* Ensure different random header each time */
+    if (++calls == 1) {
+        #define PI_SEED 3141592654UL
+        srand((unsigned)(time(NULL) ^ PI_SEED));
+    }
+
+    while (i < size)
+        buf[i++] = (rand() >>
7) & 0xff; + + return size; +} +#endif + +int32_t mz_os_rename(const char *source_path, const char *target_path) { + if (rename(source_path, target_path) == -1) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int32_t mz_os_unlink(const char *path) { + if (unlink(path) == -1) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int32_t mz_os_file_exists(const char *path) { + struct stat path_stat; + + memset(&path_stat, 0, sizeof(path_stat)); + if (stat(path, &path_stat) == 0) + return MZ_OK; + return MZ_EXIST_ERROR; +} + +int64_t mz_os_get_file_size(const char *path) { + struct stat path_stat; + + memset(&path_stat, 0, sizeof(path_stat)); + if (stat(path, &path_stat) == 0) { + /* Stat returns size taken up by directory entry, so return 0 */ + if (S_ISDIR(path_stat.st_mode)) + return 0; + + return path_stat.st_size; + } + + return 0; +} + +int32_t mz_os_get_file_date(const char *path, time_t *modified_date, time_t *accessed_date, time_t *creation_date) { + struct stat path_stat; + char *name = NULL; + size_t len = 0; + int32_t err = MZ_INTERNAL_ERROR; + + memset(&path_stat, 0, sizeof(path_stat)); + + if (strcmp(path, "-") != 0) { + /* Not all systems allow stat'ing a file with / appended */ + len = strlen(path); + name = (char *)malloc(len + 1); + strncpy(name, path, len + 1); + mz_path_remove_slash(name); + + if (stat(name, &path_stat) == 0) { + if (modified_date != NULL) + *modified_date = path_stat.st_mtime; + if (accessed_date != NULL) + *accessed_date = path_stat.st_atime; + /* Creation date not supported */ + if (creation_date != NULL) + *creation_date = 0; + + err = MZ_OK; + } + + free(name); + } + + return err; +} + +int32_t mz_os_set_file_date(const char *path, time_t modified_date, time_t accessed_date, time_t creation_date) { + struct utimbuf ut; + + ut.actime = accessed_date; + ut.modtime = modified_date; + + /* Creation date not supported */ + MZ_UNUSED(creation_date); + + if (utime(path, &ut) != 0) + return MZ_INTERNAL_ERROR; + + return MZ_OK; +} + +int32_t mz_os_get_file_attribs(const char *path, uint32_t *attributes) { + struct stat path_stat; + int32_t err = MZ_OK; + + memset(&path_stat, 0, sizeof(path_stat)); + if (lstat(path, &path_stat) == -1) + err = MZ_INTERNAL_ERROR; + *attributes = path_stat.st_mode; + return err; +} + +int32_t mz_os_set_file_attribs(const char *path, uint32_t attributes) { + int32_t err = MZ_OK; + + if (chmod(path, (mode_t)attributes) == -1) + err = MZ_INTERNAL_ERROR; + + return err; +} + +int32_t mz_os_make_dir(const char *path) { + int32_t err = 0; + + err = mkdir(path, 0755); + + if (err != 0 && errno != EEXIST) + return MZ_INTERNAL_ERROR; + + return MZ_OK; +} + +DIR* mz_os_open_dir(const char *path) { + return opendir(path); +} + +struct dirent* mz_os_read_dir(DIR *dir) { + if (dir == NULL) + return NULL; + return readdir(dir); +} + +int32_t mz_os_close_dir(DIR *dir) { + if (dir == NULL) + return MZ_PARAM_ERROR; + if (closedir(dir) == -1) + return MZ_INTERNAL_ERROR; + return MZ_OK; +} + +int32_t mz_os_is_dir(const char *path) { + struct stat path_stat; + + memset(&path_stat, 0, sizeof(path_stat)); + stat(path, &path_stat); + if (S_ISDIR(path_stat.st_mode)) + return MZ_OK; + + return MZ_EXIST_ERROR; +} + +int32_t mz_os_is_symlink(const char *path) { + struct stat path_stat; + + memset(&path_stat, 0, sizeof(path_stat)); + lstat(path, &path_stat); + if (S_ISLNK(path_stat.st_mode)) + return MZ_OK; + + return MZ_EXIST_ERROR; +} + +int32_t mz_os_make_symlink(const char *path, const char *target_path) { + if (symlink(target_path, path) != 0) + return 
MZ_INTERNAL_ERROR; + return MZ_OK; +} + +int32_t mz_os_read_symlink(const char *path, char *target_path, int32_t max_target_path) { + size_t length = 0; + + length = (size_t)readlink(path, target_path, max_target_path - 1); + if (length == (size_t)-1) + return MZ_EXIST_ERROR; + + target_path[length] = 0; + return MZ_OK; +} + +uint64_t mz_os_ms_time(void) { + struct timespec ts; + +#if defined(__APPLE__) + clock_serv_t cclock; + mach_timespec_t mts; + + host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock); + clock_get_time(cclock, &mts); + mach_port_deallocate(mach_task_self(), cclock); + + ts.tv_sec = mts.tv_sec; + ts.tv_nsec = mts.tv_nsec; +#else + clock_gettime(CLOCK_MONOTONIC, &ts); +#endif + + return ((uint64_t)ts.tv_sec * 1000) + ((uint64_t)ts.tv_nsec / 1000000); +} diff --git a/Externals/minizip/mz_os_win32.c b/Externals/minizip/mz_os_win32.c new file mode 100644 index 000000000000..182bdbb84030 --- /dev/null +++ b/Externals/minizip/mz_os_win32.c @@ -0,0 +1,659 @@ +/* mz_os_win32.c -- System functions for Windows + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" +#include "mz_os.h" +#include "mz_strm_os.h" + +#include +#include + +/***************************************************************************/ + +#if defined(WINAPI_FAMILY_PARTITION) && (!(defined(MZ_WINRT_API))) +# if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) +# define MZ_WINRT_API 1 +# endif +#endif + +#ifndef SYMBOLIC_LINK_FLAG_DIRECTORY +# define SYMBOLIC_LINK_FLAG_DIRECTORY 0x1 +#endif + +/***************************************************************************/ + +typedef struct DIR_int_s { + void *find_handle; + WIN32_FIND_DATAW find_data; + struct dirent entry; + uint8_t end; +} DIR_int; + +/***************************************************************************/ + +wchar_t *mz_os_unicode_string_create(const char *string, int32_t encoding) { + wchar_t *string_wide = NULL; + uint32_t string_wide_size = 0; + + string_wide_size = MultiByteToWideChar(encoding, 0, string, -1, NULL, 0); + if (string_wide_size == 0) + return NULL; + string_wide = (wchar_t *)MZ_ALLOC((string_wide_size + 1) * sizeof(wchar_t)); + if (string_wide == NULL) + return NULL; + + memset(string_wide, 0, sizeof(wchar_t) * (string_wide_size + 1)); + MultiByteToWideChar(encoding, 0, string, -1, string_wide, string_wide_size); + + return string_wide; +} + +void mz_os_unicode_string_delete(wchar_t **string) { + if (string != NULL) { + MZ_FREE(*string); + *string = NULL; + } +} + +uint8_t *mz_os_utf8_string_create(const char *string, int32_t encoding) { + wchar_t *string_wide = NULL; + uint8_t *string_utf8 = NULL; + uint32_t string_utf8_size = 0; + + string_wide = mz_os_unicode_string_create(string, encoding); + if (string_wide) { + string_utf8_size = WideCharToMultiByte(CP_UTF8, 0, string_wide, -1, NULL, 0, NULL, NULL); + string_utf8 = (uint8_t *)MZ_ALLOC((string_utf8_size + 1) * sizeof(wchar_t)); + + if (string_utf8) { + memset(string_utf8, 0, string_utf8_size + 1); + WideCharToMultiByte(CP_UTF8, 0, string_wide, -1, (char *)string_utf8, string_utf8_size, NULL, NULL); + } + + mz_os_unicode_string_delete(&string_wide); + } + + return string_utf8; +} + +uint8_t *mz_os_utf8_string_create_from_unicode(const wchar_t *string, int32_t encoding) { + uint8_t *string_utf8 = NULL; + uint32_t 
string_utf8_size = 0; + + MZ_UNUSED(encoding); + + string_utf8_size = WideCharToMultiByte(CP_UTF8, 0, string, -1, NULL, 0, NULL, NULL); + string_utf8 = (uint8_t *)MZ_ALLOC((string_utf8_size + 1) * sizeof(wchar_t)); + + if (string_utf8) { + memset(string_utf8, 0, string_utf8_size + 1); + WideCharToMultiByte(CP_UTF8, 0, string, -1, (char *)string_utf8, string_utf8_size, NULL, NULL); + } + + return string_utf8; +} + +void mz_os_utf8_string_delete(uint8_t **string) { + if (string != NULL) { + MZ_FREE(*string); + *string = NULL; + } +} + +/***************************************************************************/ + +int32_t mz_os_rand(uint8_t *buf, int32_t size) { + unsigned __int64 pentium_tsc[1]; + int32_t len = 0; + + for (len = 0; len < (int)size; len += 1) { + if (len % 8 == 0) + QueryPerformanceCounter((LARGE_INTEGER *)pentium_tsc); + buf[len] = ((unsigned char*)pentium_tsc)[len % 8]; + } + + return len; +} + +int32_t mz_os_rename(const char *source_path, const char *target_path) { + wchar_t *source_path_wide = NULL; + wchar_t *target_path_wide = NULL; + int32_t result = 0; + int32_t err = MZ_OK; + + if (source_path == NULL || target_path == NULL) + return MZ_PARAM_ERROR; + + source_path_wide = mz_os_unicode_string_create(source_path, MZ_ENCODING_UTF8); + if (source_path_wide == NULL) { + err = MZ_PARAM_ERROR; + } else { + target_path_wide = mz_os_unicode_string_create(target_path, MZ_ENCODING_UTF8); + if (target_path_wide == NULL) + err = MZ_PARAM_ERROR; + } + + if (err == MZ_OK) { +#ifdef MZ_WINRT_API + result = MoveFileExW(source_path_wide, target_path_wide, MOVEFILE_WRITE_THROUGH); +#else + result = MoveFileW(source_path_wide, target_path_wide); +#endif + if (result == 0) + err = MZ_EXIST_ERROR; + } + + if (target_path_wide) + mz_os_unicode_string_delete(&target_path_wide); + if (source_path_wide) + mz_os_unicode_string_delete(&source_path_wide); + + return err; +} + +int32_t mz_os_unlink(const char *path) { + wchar_t *path_wide = NULL; + int32_t result = 0; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + if (mz_os_is_dir(path) == MZ_OK) + result = RemoveDirectoryW(path_wide); + else + result = DeleteFileW(path_wide); + + mz_os_unicode_string_delete(&path_wide); + + if (result == 0) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int32_t mz_os_file_exists(const char *path) { + wchar_t *path_wide = NULL; + DWORD attribs = 0; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + attribs = GetFileAttributesW(path_wide); + mz_os_unicode_string_delete(&path_wide); + + if (attribs == 0xFFFFFFFF) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int64_t mz_os_get_file_size(const char *path) { + HANDLE handle = NULL; + LARGE_INTEGER large_size; + wchar_t *path_wide = NULL; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; +#ifdef MZ_WINRT_API + handle = CreateFile2(path_wide, GENERIC_READ, 0, OPEN_EXISTING, NULL); +#else + handle = CreateFileW(path_wide, GENERIC_READ, 0, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL); +#endif + mz_os_unicode_string_delete(&path_wide); + + large_size.QuadPart = 0; + + if (handle != INVALID_HANDLE_VALUE) { + GetFileSizeEx(handle, &large_size); + CloseHandle(handle); + } + + return large_size.QuadPart; +} + +static 
void mz_os_file_to_unix_time(FILETIME file_time, time_t *unix_time) { + uint64_t quad_file_time = 0; + quad_file_time = file_time.dwLowDateTime; + quad_file_time |= ((uint64_t)file_time.dwHighDateTime << 32); + *unix_time = (time_t)((quad_file_time - 116444736000000000LL) / 10000000); +} + +static void mz_os_unix_to_file_time(time_t unix_time, FILETIME *file_time) { + uint64_t quad_file_time = 0; + quad_file_time = ((uint64_t)unix_time * 10000000) + 116444736000000000LL; + file_time->dwHighDateTime = (quad_file_time >> 32); + file_time->dwLowDateTime = (uint32_t)(quad_file_time); +} + +int32_t mz_os_get_file_date(const char *path, time_t *modified_date, time_t *accessed_date, time_t *creation_date) { + WIN32_FIND_DATAW ff32; + HANDLE handle = NULL; + wchar_t *path_wide = NULL; + int32_t err = MZ_INTERNAL_ERROR; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + handle = FindFirstFileW(path_wide, &ff32); + MZ_FREE(path_wide); + + if (handle != INVALID_HANDLE_VALUE) { + if (modified_date != NULL) + mz_os_file_to_unix_time(ff32.ftLastWriteTime, modified_date); + if (accessed_date != NULL) + mz_os_file_to_unix_time(ff32.ftLastAccessTime, accessed_date); + if (creation_date != NULL) + mz_os_file_to_unix_time(ff32.ftCreationTime, creation_date); + + FindClose(handle); + err = MZ_OK; + } + + return err; +} + +int32_t mz_os_set_file_date(const char *path, time_t modified_date, time_t accessed_date, time_t creation_date) { + HANDLE handle = NULL; + FILETIME ftm_creation, ftm_accessed, ftm_modified; + wchar_t *path_wide = NULL; + int32_t err = MZ_OK; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + +#ifdef MZ_WINRT_API + handle = CreateFile2(path_wide, GENERIC_READ | GENERIC_WRITE, 0, OPEN_EXISTING, NULL); +#else + handle = CreateFileW(path_wide, GENERIC_READ | GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, NULL); +#endif + mz_os_unicode_string_delete(&path_wide); + + if (handle != INVALID_HANDLE_VALUE) { + GetFileTime(handle, &ftm_creation, &ftm_accessed, &ftm_modified); + + if (modified_date != 0) + mz_os_unix_to_file_time(modified_date, &ftm_modified); + if (accessed_date != 0) + mz_os_unix_to_file_time(accessed_date, &ftm_accessed); + if (creation_date != 0) + mz_os_unix_to_file_time(creation_date, &ftm_creation); + + if (SetFileTime(handle, &ftm_creation, &ftm_accessed, &ftm_modified) == 0) + err = MZ_INTERNAL_ERROR; + + CloseHandle(handle); + } + + return err; +} + +int32_t mz_os_get_file_attribs(const char *path, uint32_t *attributes) { + wchar_t *path_wide = NULL; + int32_t err = MZ_OK; + + if (path == NULL || attributes == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + *attributes = GetFileAttributesW(path_wide); + mz_os_unicode_string_delete(&path_wide); + + if (*attributes == INVALID_FILE_ATTRIBUTES) + err = MZ_INTERNAL_ERROR; + + return err; +} + +int32_t mz_os_set_file_attribs(const char *path, uint32_t attributes) { + wchar_t *path_wide = NULL; + int32_t err = MZ_OK; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + if (SetFileAttributesW(path_wide, attributes) == 0) + err = MZ_INTERNAL_ERROR; + mz_os_unicode_string_delete(&path_wide); + + 
return err; +} + +int32_t mz_os_make_dir(const char *path) { + wchar_t *path_wide = NULL; + int32_t err = MZ_OK; + + if (path == NULL) + return MZ_PARAM_ERROR; + + /* Don't try to create a drive letter */ + if ((path[0] != 0) && (strlen(path) <= 3) && (path[1] == ':')) + return mz_os_is_dir(path); + + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + if (CreateDirectoryW(path_wide, NULL) == 0) { + if (GetLastError() != ERROR_ALREADY_EXISTS) + err = MZ_INTERNAL_ERROR; + } + + mz_os_unicode_string_delete(&path_wide); + + return err; +} + +DIR *mz_os_open_dir(const char *path) { + WIN32_FIND_DATAW find_data; + DIR_int *dir_int = NULL; + wchar_t *path_wide = NULL; + char fixed_path[320]; + void *handle = NULL; + + + if (path == NULL) + return NULL; + + strncpy(fixed_path, path, sizeof(fixed_path) - 1); + fixed_path[sizeof(fixed_path) - 1] = 0; + + mz_path_append_slash(fixed_path, sizeof(fixed_path), MZ_PATH_SLASH_PLATFORM); + mz_path_combine(fixed_path, "*", sizeof(fixed_path)); + + path_wide = mz_os_unicode_string_create(fixed_path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return NULL; + + handle = FindFirstFileW(path_wide, &find_data); + mz_os_unicode_string_delete(&path_wide); + + if (handle == INVALID_HANDLE_VALUE) + return NULL; + + dir_int = (DIR_int *)MZ_ALLOC(sizeof(DIR_int)); + if (dir_int == NULL) + return NULL; + dir_int->find_handle = handle; + dir_int->end = 0; + + memcpy(&dir_int->find_data, &find_data, sizeof(dir_int->find_data)); + + return (DIR *)dir_int; +} + +struct dirent* mz_os_read_dir(DIR *dir) { + DIR_int *dir_int; + + if (dir == NULL) + return NULL; + + dir_int = (DIR_int *)dir; + if (dir_int->end) + return NULL; + + WideCharToMultiByte(CP_UTF8, 0, dir_int->find_data.cFileName, -1, + dir_int->entry.d_name, sizeof(dir_int->entry.d_name), NULL, NULL); + + if (FindNextFileW(dir_int->find_handle, &dir_int->find_data) == 0) { + if (GetLastError() != ERROR_NO_MORE_FILES) + return NULL; + + dir_int->end = 1; + } + + return &dir_int->entry; +} + +int32_t mz_os_close_dir(DIR *dir) { + DIR_int *dir_int; + + if (dir == NULL) + return MZ_PARAM_ERROR; + + dir_int = (DIR_int *)dir; + if (dir_int->find_handle != INVALID_HANDLE_VALUE) + FindClose(dir_int->find_handle); + MZ_FREE(dir_int); + return MZ_OK; +} + +int32_t mz_os_is_dir(const char *path) { + wchar_t *path_wide = NULL; + uint32_t attribs = 0; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + attribs = GetFileAttributesW(path_wide); + mz_os_unicode_string_delete(&path_wide); + + if (attribs != 0xFFFFFFFF) { + if (attribs & FILE_ATTRIBUTE_DIRECTORY) + return MZ_OK; + } + + return MZ_EXIST_ERROR; +} + +int32_t mz_os_is_symlink(const char *path) { + wchar_t *path_wide = NULL; + uint32_t attribs = 0; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + + attribs = GetFileAttributesW(path_wide); + mz_os_unicode_string_delete(&path_wide); + + if (attribs != 0xFFFFFFFF) { + if (attribs & FILE_ATTRIBUTE_REPARSE_POINT) + return MZ_OK; + } + + return MZ_EXIST_ERROR; +} + +int32_t mz_os_make_symlink(const char *path, const char *target_path) { + typedef BOOLEAN (WINAPI *LPCREATESYMBOLICLINKW)(LPCWSTR, LPCWSTR, DWORD); + LPCREATESYMBOLICLINKW create_symbolic_link_w = NULL; + HMODULE kernel32_mod = NULL; + wchar_t *path_wide = NULL; + 
wchar_t *target_path_wide = NULL; + int32_t err = MZ_OK; + int32_t flags = 0; + + if (path == NULL) + return MZ_PARAM_ERROR; + +#ifdef MZ_WINRT_API + MEMORY_BASIC_INFORMATION mbi; + memset(&mbi, 0, sizeof(mbi)); + VirtualQuery(VirtualQuery, &mbi, sizeof(mbi)); + kernel32_mod = (HMODULE)mbi.AllocationBase; +#else + kernel32_mod = GetModuleHandleW(L"kernel32.dll"); +#endif + + if (kernel32_mod == NULL) + return MZ_SUPPORT_ERROR; + + create_symbolic_link_w = (LPCREATESYMBOLICLINKW)GetProcAddress(kernel32_mod, "CreateSymbolicLinkW"); + if (create_symbolic_link_w == NULL) { + return MZ_SUPPORT_ERROR; + } + + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) { + return MZ_PARAM_ERROR; + } + + target_path_wide = mz_os_unicode_string_create(target_path, MZ_ENCODING_UTF8); + if (target_path_wide != NULL) { + if (mz_path_has_slash(target_path) == MZ_OK) + flags |= SYMBOLIC_LINK_FLAG_DIRECTORY; + + if (create_symbolic_link_w(path_wide, target_path_wide, flags) == FALSE) + err = MZ_SYMLINK_ERROR; + + mz_os_unicode_string_delete(&target_path_wide); + } else { + err = MZ_PARAM_ERROR; + } + + mz_os_unicode_string_delete(&path_wide); + + return err; +} + +int32_t mz_os_read_symlink(const char *path, char *target_path, int32_t max_target_path) { + typedef struct _REPARSE_DATA_BUFFER { + ULONG ReparseTag; + USHORT ReparseDataLength; + USHORT Reserved; + union { + struct { + USHORT SubstituteNameOffset; + USHORT SubstituteNameLength; + USHORT PrintNameOffset; + USHORT PrintNameLength; + ULONG Flags; + WCHAR PathBuffer[1]; + } SymbolicLinkReparseBuffer; + struct { + USHORT SubstituteNameOffset; + USHORT SubstituteNameLength; + USHORT PrintNameOffset; + USHORT PrintNameLength; + WCHAR PathBuffer[1]; + } MountPointReparseBuffer; + struct { + UCHAR DataBuffer[1]; + } GenericReparseBuffer; + }; + } REPARSE_DATA_BUFFER; + REPARSE_DATA_BUFFER *reparse_data = NULL; + DWORD length = 0; + HANDLE handle = NULL; + wchar_t *path_wide = NULL; + wchar_t *target_path_wide = NULL; + uint8_t buffer[MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; + int32_t target_path_len = 0; + int32_t target_path_idx = 0; + int32_t err = MZ_OK; + uint8_t *target_path_utf8 = NULL; + + if (path == NULL) + return MZ_PARAM_ERROR; + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + +#ifdef MZ_WINRT_API + CREATEFILE2_EXTENDED_PARAMETERS extended_params; + memset(&extended_params, 0, sizeof(extended_params)); + extended_params.dwSize = sizeof(extended_params); + extended_params.dwFileAttributes = FILE_ATTRIBUTE_NORMAL; + extended_params.dwFileFlags = FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT; + extended_params.dwSecurityQosFlags = SECURITY_ANONYMOUS; + extended_params.lpSecurityAttributes = NULL; + extended_params.hTemplateFile = NULL; + handle = CreateFile2(path_wide, FILE_READ_EA, FILE_SHARE_READ | FILE_SHARE_WRITE, OPEN_EXISTING, &extended_params); +#else + handle = CreateFileW(path_wide, FILE_READ_EA, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, + FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, NULL); +#endif + + if (handle == INVALID_HANDLE_VALUE) { + mz_os_unicode_string_delete(&path_wide); + return MZ_OPEN_ERROR; + } + + if (DeviceIoControl(handle, FSCTL_GET_REPARSE_POINT, NULL, 0, buffer, sizeof(buffer), &length, NULL) == TRUE) { + reparse_data = (REPARSE_DATA_BUFFER *)buffer; + if ((IsReparseTagMicrosoft(reparse_data->ReparseTag)) && + (reparse_data->ReparseTag == IO_REPARSE_TAG_SYMLINK)) { + target_path_len 
= max_target_path * sizeof(wchar_t); + if (target_path_len > reparse_data->SymbolicLinkReparseBuffer.PrintNameLength) + target_path_len = reparse_data->SymbolicLinkReparseBuffer.PrintNameLength; + + target_path_wide = (wchar_t *)MZ_ALLOC(target_path_len + sizeof(wchar_t)); + if (target_path_wide) { + target_path_idx = reparse_data->SymbolicLinkReparseBuffer.PrintNameOffset / sizeof(wchar_t); + memcpy(target_path_wide, &reparse_data->SymbolicLinkReparseBuffer.PathBuffer[target_path_idx], + target_path_len); + + target_path_wide[target_path_len / sizeof(wchar_t)] = 0; + target_path_utf8 = mz_os_utf8_string_create_from_unicode(target_path_wide, MZ_ENCODING_UTF8); + + if (target_path_utf8) { + strncpy(target_path, (const char *)target_path_utf8, max_target_path - 1); + target_path[max_target_path - 1] = 0; + /* Ensure directories have slash at the end so we can recreate them later */ + if (mz_os_is_dir((const char *)target_path_utf8) == MZ_OK) + mz_path_append_slash(target_path, max_target_path, MZ_PATH_SLASH_PLATFORM); + mz_os_utf8_string_delete(&target_path_utf8); + } else { + err = MZ_MEM_ERROR; + } + + MZ_FREE(target_path_wide); + } else { + err = MZ_MEM_ERROR; + } + } + } else { + err = MZ_INTERNAL_ERROR; + } + + CloseHandle(handle); + mz_os_unicode_string_delete(&path_wide); + return err; +} + +uint64_t mz_os_ms_time(void) { + SYSTEMTIME system_time; + FILETIME file_time; + uint64_t quad_file_time = 0; + + GetSystemTime(&system_time); + SystemTimeToFileTime(&system_time, &file_time); + + quad_file_time = file_time.dwLowDateTime; + quad_file_time |= ((uint64_t)file_time.dwHighDateTime << 32); + + return quad_file_time / 10000 - 11644473600000LL; +} diff --git a/Externals/minizip/mz_strm.c b/Externals/minizip/mz_strm.c new file mode 100644 index 000000000000..da7d5872d61a --- /dev/null +++ b/Externals/minizip/mz_strm.c @@ -0,0 +1,560 @@ +/* mz_strm.c -- Stream interface + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#include "mz.h" +#include "mz_strm.h" + +/***************************************************************************/ + +#define MZ_STREAM_FIND_SIZE (1024) + +/***************************************************************************/ + +int32_t mz_stream_open(void *stream, const char *path, int32_t mode) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->open == NULL) + return MZ_STREAM_ERROR; + return strm->vtbl->open(strm, path, mode); +} + +int32_t mz_stream_is_open(void *stream) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->is_open == NULL) + return MZ_STREAM_ERROR; + return strm->vtbl->is_open(strm); +} + +int32_t mz_stream_read(void *stream, void *buf, int32_t size) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->read == NULL) + return MZ_PARAM_ERROR; + if (mz_stream_is_open(stream) != MZ_OK) + return MZ_STREAM_ERROR; + return strm->vtbl->read(strm, buf, size); +} + +static int32_t mz_stream_read_value(void *stream, uint64_t *value, int32_t len) { + uint8_t buf[8]; + int32_t n = 0; + int32_t i = 0; + + *value = 0; + if (mz_stream_read(stream, buf, len) == len) { + for (n = 0; n < len; n += 1, i += 8) + *value += ((uint64_t)buf[n]) << i; + } else if (mz_stream_error(stream)) + return MZ_STREAM_ERROR; + else + return MZ_END_OF_STREAM; + + return MZ_OK; +} + +int32_t mz_stream_read_uint8(void *stream, uint8_t *value) { + int32_t err = MZ_OK; + uint64_t value64 = 0; + + *value = 0; + err = mz_stream_read_value(stream, &value64, sizeof(uint8_t)); + if (err == MZ_OK) + *value = (uint8_t)value64; + return err; +} + +int32_t mz_stream_read_uint16(void *stream, uint16_t *value) { + int32_t err = MZ_OK; + uint64_t value64 = 0; + + *value = 0; + err = mz_stream_read_value(stream, &value64, sizeof(uint16_t)); + if (err == MZ_OK) + *value = (uint16_t)value64; + return err; +} + +int32_t mz_stream_read_uint32(void *stream, uint32_t *value) { + int32_t err = MZ_OK; + uint64_t value64 = 0; + + *value = 0; + err = mz_stream_read_value(stream, &value64, sizeof(uint32_t)); + if (err == MZ_OK) + *value = (uint32_t)value64; + return err; +} + +int32_t mz_stream_read_int64(void *stream, int64_t *value) { + return mz_stream_read_value(stream, (uint64_t *)value, sizeof(uint64_t)); +} + +int32_t mz_stream_read_uint64(void *stream, uint64_t *value) { + return mz_stream_read_value(stream, value, sizeof(uint64_t)); +} + +int32_t mz_stream_write(void *stream, const void *buf, int32_t size) { + mz_stream *strm = (mz_stream *)stream; + if (size == 0) + return size; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->write == NULL) + return MZ_PARAM_ERROR; + if (mz_stream_is_open(stream) != MZ_OK) + return MZ_STREAM_ERROR; + return strm->vtbl->write(strm, buf, size); +} + +static int32_t mz_stream_write_value(void *stream, uint64_t value, int32_t len) { + uint8_t buf[8]; + int32_t n = 0; + + for (n = 0; n < len; n += 1) { + buf[n] = (uint8_t)(value & 0xff); + value >>= 8; + } + + if (value != 0) { + /* Data overflow - hack for ZIP64 (X Roche) */ + for (n = 0; n < len; n += 1) + buf[n] = 0xff; + } + + if (mz_stream_write(stream, buf, len) != len) + return MZ_STREAM_ERROR; + + return MZ_OK; +} + +int32_t mz_stream_write_uint8(void *stream, uint8_t value) { + return mz_stream_write_value(stream, value, sizeof(uint8_t)); +} + +int32_t mz_stream_write_uint16(void *stream, uint16_t value) { + return mz_stream_write_value(stream, value, sizeof(uint16_t)); +} 
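All of the fixed-width read/write helpers above delegate to mz_stream_read_value and mz_stream_write_value, which serialize integers least-significant byte first regardless of host byte order (ZIP structures are little-endian on disk). A standalone sketch of that encoding follows; it is not taken from the patch, and the demo_* helper names are purely illustrative.

#include <assert.h>
#include <stdint.h>

/* Write an integer least-significant byte first, as mz_stream_write_value does. */
static void demo_write_value(uint8_t *buf, uint64_t value, int len) {
    for (int n = 0; n < len; n++) {
        buf[n] = (uint8_t)(value & 0xff);
        value >>= 8;
    }
}

/* Reassemble the value in the same byte order, as mz_stream_read_value does. */
static uint64_t demo_read_value(const uint8_t *buf, int len) {
    uint64_t value = 0;
    for (int n = 0, i = 0; n < len; n++, i += 8)
        value += ((uint64_t)buf[n]) << i;
    return value;
}

int main(void) {
    uint8_t buf[4];

    demo_write_value(buf, 0x04034b50u, sizeof(buf)); /* ZIP local file header signature */
    assert(buf[0] == 0x50 && buf[1] == 0x4b && buf[2] == 0x03 && buf[3] == 0x04);
    assert(demo_read_value(buf, sizeof(buf)) == 0x04034b50u);
    return 0;
}

Round-tripping the local file header signature this way produces the familiar "PK\3\4" byte sequence at the start of a ZIP archive.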
+ +int32_t mz_stream_write_uint32(void *stream, uint32_t value) { + return mz_stream_write_value(stream, value, sizeof(uint32_t)); +} + +int32_t mz_stream_write_int64(void *stream, int64_t value) { + return mz_stream_write_value(stream, (uint64_t)value, sizeof(uint64_t)); +} + +int32_t mz_stream_write_uint64(void *stream, uint64_t value) { + return mz_stream_write_value(stream, value, sizeof(uint64_t)); +} + +int32_t mz_stream_copy(void *target, void *source, int32_t len) { + return mz_stream_copy_stream(target, NULL, source, NULL, len); +} + +int32_t mz_stream_copy_to_end(void *target, void *source) { + return mz_stream_copy_stream_to_end(target, NULL, source, NULL); +} + +int32_t mz_stream_copy_stream(void *target, mz_stream_write_cb write_cb, void *source, + mz_stream_read_cb read_cb, int32_t len) { + uint8_t buf[16384]; + int32_t bytes_to_copy = 0; + int32_t read = 0; + int32_t written = 0; + + if (write_cb == NULL) + write_cb = mz_stream_write; + if (read_cb == NULL) + read_cb = mz_stream_read; + + while (len > 0) { + bytes_to_copy = len; + if (bytes_to_copy > (int32_t)sizeof(buf)) + bytes_to_copy = sizeof(buf); + read = read_cb(source, buf, bytes_to_copy); + if (read <= 0) + return MZ_STREAM_ERROR; + written = write_cb(target, buf, read); + if (written != read) + return MZ_STREAM_ERROR; + len -= read; + } + + return MZ_OK; +} + +int32_t mz_stream_copy_stream_to_end(void *target, mz_stream_write_cb write_cb, void *source, + mz_stream_read_cb read_cb) { + uint8_t buf[16384]; + int32_t read = 0; + int32_t written = 0; + + if (write_cb == NULL) + write_cb = mz_stream_write; + if (read_cb == NULL) + read_cb = mz_stream_read; + + read = read_cb(source, buf, sizeof(buf)); + while (read > 0) { + written = write_cb(target, buf, read); + if (written != read) + return MZ_STREAM_ERROR; + read = read_cb(source, buf, sizeof(buf)); + } + + if (read < 0) + return MZ_STREAM_ERROR; + + return MZ_OK; +} + +int64_t mz_stream_tell(void *stream) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->tell == NULL) + return MZ_PARAM_ERROR; + if (mz_stream_is_open(stream) != MZ_OK) + return MZ_STREAM_ERROR; + return strm->vtbl->tell(strm); +} + +int32_t mz_stream_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->seek == NULL) + return MZ_PARAM_ERROR; + if (mz_stream_is_open(stream) != MZ_OK) + return MZ_STREAM_ERROR; + if (origin == MZ_SEEK_SET && offset < 0) + return MZ_SEEK_ERROR; + return strm->vtbl->seek(strm, offset, origin); +} + +int32_t mz_stream_find(void *stream, const void *find, int32_t find_size, int64_t max_seek, int64_t *position) { + uint8_t buf[MZ_STREAM_FIND_SIZE]; + int32_t buf_pos = 0; + int32_t read_size = sizeof(buf); + int32_t read = 0; + int64_t read_pos = 0; + int64_t start_pos = 0; + int64_t disk_pos = 0; + int32_t i = 0; + uint8_t first = 1; + int32_t err = MZ_OK; + + if (stream == NULL || find == NULL || position == NULL) + return MZ_PARAM_ERROR; + if (find_size < 0 || find_size >= (int32_t)sizeof(buf)) + return MZ_PARAM_ERROR; + + *position = -1; + + start_pos = mz_stream_tell(stream); + + while (read_pos < max_seek) { + if (read_size > (int32_t)(max_seek - read_pos - buf_pos) && (max_seek - read_pos - buf_pos) < (int64_t)sizeof(buf)) + read_size = (int32_t)(max_seek - read_pos - buf_pos); + + read = mz_stream_read(stream, buf + buf_pos, read_size); + if ((read <= 0) || (read + buf_pos < find_size)) + break; + + for (i = 0; i <= read + 
buf_pos - find_size; i += 1) { + if (memcmp(&buf[i], find, find_size) != 0) + continue; + + disk_pos = mz_stream_tell(stream); + + /* Seek to position on disk where the data was found */ + err = mz_stream_seek(stream, disk_pos - ((int64_t)read + buf_pos - i), MZ_SEEK_SET); + if (err != MZ_OK) + return MZ_EXIST_ERROR; + + *position = start_pos + read_pos + i; + return MZ_OK; + } + + if (first) { + read -= find_size; + read_size -= find_size; + buf_pos = find_size; + first = 0; + } + + memmove(buf, buf + read, find_size); + read_pos += read; + } + + return MZ_EXIST_ERROR; +} + +int32_t mz_stream_find_reverse(void *stream, const void *find, int32_t find_size, int64_t max_seek, int64_t *position) { + uint8_t buf[MZ_STREAM_FIND_SIZE]; + int32_t buf_pos = 0; + int32_t read_size = MZ_STREAM_FIND_SIZE; + int64_t read_pos = 0; + int32_t read = 0; + int64_t start_pos = 0; + int64_t disk_pos = 0; + uint8_t first = 1; + int32_t i = 0; + int32_t err = MZ_OK; + + if (stream == NULL || find == NULL || position == NULL) + return MZ_PARAM_ERROR; + if (find_size < 0 || find_size >= (int32_t)sizeof(buf)) + return MZ_PARAM_ERROR; + + *position = -1; + + start_pos = mz_stream_tell(stream); + + while (read_pos < max_seek) { + if (read_size > (int32_t)(max_seek - read_pos) && (max_seek - read_pos) < (int64_t)sizeof(buf)) + read_size = (int32_t)(max_seek - read_pos); + + if (mz_stream_seek(stream, start_pos - (read_pos + read_size), MZ_SEEK_SET) != MZ_OK) + break; + read = mz_stream_read(stream, buf, read_size); + if ((read <= 0) || (read + buf_pos < find_size)) + break; + if (read + buf_pos < MZ_STREAM_FIND_SIZE) + memmove(buf + MZ_STREAM_FIND_SIZE - (read + buf_pos), buf, read); + + for (i = find_size; i <= (read + buf_pos); i += 1) { + if (memcmp(&buf[MZ_STREAM_FIND_SIZE - i], find, find_size) != 0) + continue; + + disk_pos = mz_stream_tell(stream); + + /* Seek to position on disk where the data was found */ + err = mz_stream_seek(stream, disk_pos + buf_pos - i, MZ_SEEK_SET); + if (err != MZ_OK) + return MZ_EXIST_ERROR; + + *position = start_pos - (read_pos - buf_pos + i); + return MZ_OK; + } + + if (first) { + read -= find_size; + read_size -= find_size; + buf_pos = find_size; + first = 0; + } + + if (read == 0) + break; + + memmove(buf + read_size, buf, find_size); + read_pos += read; + } + + return MZ_EXIST_ERROR; +} + +int32_t mz_stream_close(void *stream) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->close == NULL) + return MZ_PARAM_ERROR; + if (mz_stream_is_open(stream) != MZ_OK) + return MZ_STREAM_ERROR; + return strm->vtbl->close(strm); +} + +int32_t mz_stream_error(void *stream) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->error == NULL) + return MZ_PARAM_ERROR; + return strm->vtbl->error(strm); +} + +int32_t mz_stream_set_base(void *stream, void *base) { + mz_stream *strm = (mz_stream *)stream; + strm->base = (mz_stream *)base; + return MZ_OK; +} + +void* mz_stream_get_interface(void *stream) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL) + return NULL; + return (void *)strm->vtbl; +} + +int32_t mz_stream_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->get_prop_int64 == NULL) + return MZ_PARAM_ERROR; + return strm->vtbl->get_prop_int64(stream, prop, value); +} + +int32_t mz_stream_set_prop_int64(void *stream, int32_t prop, int64_t value) { + 
mz_stream *strm = (mz_stream *)stream; + if (strm == NULL || strm->vtbl == NULL || strm->vtbl->set_prop_int64 == NULL) + return MZ_PARAM_ERROR; + return strm->vtbl->set_prop_int64(stream, prop, value); +} + +void *mz_stream_create(void **stream, mz_stream_vtbl *vtbl) { + if (stream == NULL) + return NULL; + if (vtbl == NULL || vtbl->create == NULL) + return NULL; + return vtbl->create(stream); +} + +void mz_stream_delete(void **stream) { + mz_stream *strm = NULL; + if (stream == NULL) + return; + strm = (mz_stream *)*stream; + if (strm != NULL && strm->vtbl != NULL && strm->vtbl->destroy != NULL) + strm->vtbl->destroy(stream); + *stream = NULL; +} + +/***************************************************************************/ + +typedef struct mz_stream_raw_s { + mz_stream stream; + int64_t total_in; + int64_t total_out; + int64_t max_total_in; +} mz_stream_raw; + +/***************************************************************************/ + +int32_t mz_stream_raw_open(void *stream, const char *path, int32_t mode) { + MZ_UNUSED(stream); + MZ_UNUSED(path); + MZ_UNUSED(mode); + + return MZ_OK; +} + +int32_t mz_stream_raw_is_open(void *stream) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + return mz_stream_is_open(raw->stream.base); +} + +int32_t mz_stream_raw_read(void *stream, void *buf, int32_t size) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + int32_t bytes_to_read = size; + int32_t read = 0; + + if (raw->max_total_in > 0) { + if ((int64_t)bytes_to_read > (raw->max_total_in - raw->total_in)) + bytes_to_read = (int32_t)(raw->max_total_in - raw->total_in); + } + + read = mz_stream_read(raw->stream.base, buf, bytes_to_read); + + if (read > 0) { + raw->total_in += read; + raw->total_out += read; + } + + return read; +} + +int32_t mz_stream_raw_write(void *stream, const void *buf, int32_t size) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + int32_t written = 0; + + written = mz_stream_write(raw->stream.base, buf, size); + + if (written > 0) { + raw->total_out += written; + raw->total_in += written; + } + + return written; +} + +int64_t mz_stream_raw_tell(void *stream) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + return mz_stream_tell(raw->stream.base); +} + +int32_t mz_stream_raw_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + return mz_stream_seek(raw->stream.base, offset, origin); +} + +int32_t mz_stream_raw_close(void *stream) { + MZ_UNUSED(stream); + return MZ_OK; +} + +int32_t mz_stream_raw_error(void *stream) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + return mz_stream_error(raw->stream.base); +} + +int32_t mz_stream_raw_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = raw->total_in; + return MZ_OK; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = raw->total_out; + return MZ_OK; + } + return MZ_EXIST_ERROR; +} + +int32_t mz_stream_raw_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_raw *raw = (mz_stream_raw *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN_MAX: + raw->max_total_in = value; + return MZ_OK; + } + return MZ_EXIST_ERROR; +} + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_raw_vtbl = { + mz_stream_raw_open, + mz_stream_raw_is_open, + mz_stream_raw_read, + mz_stream_raw_write, + mz_stream_raw_tell, + mz_stream_raw_seek, + mz_stream_raw_close, + mz_stream_raw_error, + 
mz_stream_raw_create, + mz_stream_raw_delete, + mz_stream_raw_get_prop_int64, + mz_stream_raw_set_prop_int64 +}; + +/***************************************************************************/ + +void *mz_stream_raw_create(void **stream) { + mz_stream_raw *raw = NULL; + + raw = (mz_stream_raw *)MZ_ALLOC(sizeof(mz_stream_raw)); + if (raw != NULL) { + memset(raw, 0, sizeof(mz_stream_raw)); + raw->stream.vtbl = &mz_stream_raw_vtbl; + } + if (stream != NULL) + *stream = raw; + + return raw; +} + +void mz_stream_raw_delete(void **stream) { + mz_stream_raw *raw = NULL; + if (stream == NULL) + return; + raw = (mz_stream_raw *)*stream; + if (raw != NULL) + MZ_FREE(raw); + *stream = NULL; +} diff --git a/Externals/minizip/mz_strm.h b/Externals/minizip/mz_strm.h new file mode 100644 index 000000000000..8b0027cf5038 --- /dev/null +++ b/Externals/minizip/mz_strm.h @@ -0,0 +1,132 @@ +/* mz_strm.h -- Stream interface + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_STREAM_H +#define MZ_STREAM_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +#define MZ_STREAM_PROP_TOTAL_IN (1) +#define MZ_STREAM_PROP_TOTAL_IN_MAX (2) +#define MZ_STREAM_PROP_TOTAL_OUT (3) +#define MZ_STREAM_PROP_TOTAL_OUT_MAX (4) +#define MZ_STREAM_PROP_HEADER_SIZE (5) +#define MZ_STREAM_PROP_FOOTER_SIZE (6) +#define MZ_STREAM_PROP_DISK_SIZE (7) +#define MZ_STREAM_PROP_DISK_NUMBER (8) +#define MZ_STREAM_PROP_COMPRESS_LEVEL (9) +#define MZ_STREAM_PROP_COMPRESS_METHOD (10) +#define MZ_STREAM_PROP_COMPRESS_WINDOW (11) + +/***************************************************************************/ + +typedef int32_t (*mz_stream_open_cb) (void *stream, const char *path, int32_t mode); +typedef int32_t (*mz_stream_is_open_cb) (void *stream); +typedef int32_t (*mz_stream_read_cb) (void *stream, void *buf, int32_t size); +typedef int32_t (*mz_stream_write_cb) (void *stream, const void *buf, int32_t size); +typedef int64_t (*mz_stream_tell_cb) (void *stream); +typedef int32_t (*mz_stream_seek_cb) (void *stream, int64_t offset, int32_t origin); +typedef int32_t (*mz_stream_close_cb) (void *stream); +typedef int32_t (*mz_stream_error_cb) (void *stream); +typedef void* (*mz_stream_create_cb) (void **stream); +typedef void (*mz_stream_destroy_cb) (void **stream); + +typedef int32_t (*mz_stream_get_prop_int64_cb) (void *stream, int32_t prop, int64_t *value); +typedef int32_t (*mz_stream_set_prop_int64_cb) (void *stream, int32_t prop, int64_t value); + +typedef int32_t (*mz_stream_find_cb) (void *stream, const void *find, int32_t find_size, + int64_t max_seek, int64_t *position); + +/***************************************************************************/ + +typedef struct mz_stream_vtbl_s { + mz_stream_open_cb open; + mz_stream_is_open_cb is_open; + mz_stream_read_cb read; + mz_stream_write_cb write; + mz_stream_tell_cb tell; + mz_stream_seek_cb seek; + mz_stream_close_cb close; + mz_stream_error_cb error; + mz_stream_create_cb create; + mz_stream_destroy_cb destroy; + + mz_stream_get_prop_int64_cb get_prop_int64; + mz_stream_set_prop_int64_cb set_prop_int64; +} mz_stream_vtbl; + +typedef struct mz_stream_s { + mz_stream_vtbl *vtbl; + struct mz_stream_s *base; +} mz_stream; + 
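/* Usage sketch (illustrative only, not from minizip-ng): each concrete stream
 * embeds mz_stream as its first member and points `vtbl` at its own function
 * table, so the generic mz_stream_* wrappers dispatch through `vtbl`, while
 * `base` chains streams together (e.g. a buffered or compression stream
 * layered on a file stream). Assumes `backing` is another stream created and
 * opened elsewhere; the function name is hypothetical, and MZ_OPEN_MODE_READ
 * and MZ_MEM_ERROR come from mz.h. */
static int32_t read_first_word_sketch(void *backing, uint32_t *value) {
    void *raw = NULL;
    int32_t err;

    if (mz_stream_raw_create(&raw) == NULL)       /* raw stream declared below */
        return MZ_MEM_ERROR;

    mz_stream_set_base(raw, backing);             /* layer the raw stream on `backing` */
    err = mz_stream_open(raw, NULL, MZ_OPEN_MODE_READ);
    if (err == MZ_OK)
        err = mz_stream_read_uint32(raw, value);  /* dispatches via raw's vtbl to `backing` */

    mz_stream_close(raw);
    mz_stream_delete(&raw);
    return err;
}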
+/***************************************************************************/ + +int32_t mz_stream_open(void *stream, const char *path, int32_t mode); +int32_t mz_stream_is_open(void *stream); +int32_t mz_stream_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_read_uint8(void *stream, uint8_t *value); +int32_t mz_stream_read_uint16(void *stream, uint16_t *value); +int32_t mz_stream_read_uint32(void *stream, uint32_t *value); +int32_t mz_stream_read_int64(void *stream, int64_t *value); +int32_t mz_stream_read_uint64(void *stream, uint64_t *value); +int32_t mz_stream_write(void *stream, const void *buf, int32_t size); +int32_t mz_stream_write_uint8(void *stream, uint8_t value); +int32_t mz_stream_write_uint16(void *stream, uint16_t value); +int32_t mz_stream_write_uint32(void *stream, uint32_t value); +int32_t mz_stream_write_int64(void *stream, int64_t value); +int32_t mz_stream_write_uint64(void *stream, uint64_t value); +int32_t mz_stream_copy(void *target, void *source, int32_t len); +int32_t mz_stream_copy_to_end(void *target, void *source); +int32_t mz_stream_copy_stream(void *target, mz_stream_write_cb write_cb, void *source, mz_stream_read_cb read_cb, int32_t len); +int32_t mz_stream_copy_stream_to_end(void *target, mz_stream_write_cb write_cb, void *source, mz_stream_read_cb read_cb); +int64_t mz_stream_tell(void *stream); +int32_t mz_stream_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_find(void *stream, const void *find, int32_t find_size, int64_t max_seek, int64_t *position); +int32_t mz_stream_find_reverse(void *stream, const void *find, int32_t find_size, int64_t max_seek, int64_t *position); +int32_t mz_stream_close(void *stream); +int32_t mz_stream_error(void *stream); + +int32_t mz_stream_set_base(void *stream, void *base); +void* mz_stream_get_interface(void *stream); +int32_t mz_stream_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_create(void **stream, mz_stream_vtbl *vtbl); +void mz_stream_delete(void **stream); + +/***************************************************************************/ + +int32_t mz_stream_raw_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_raw_is_open(void *stream); +int32_t mz_stream_raw_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_raw_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_raw_tell(void *stream); +int32_t mz_stream_raw_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_raw_close(void *stream); +int32_t mz_stream_raw_error(void *stream); + +int32_t mz_stream_raw_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_raw_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_raw_create(void **stream); +void mz_stream_raw_delete(void **stream); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_buf.c b/Externals/minizip/mz_strm_buf.c new file mode 100644 index 000000000000..1dfdfdf5fd8c --- /dev/null +++ b/Externals/minizip/mz_strm_buf.c @@ -0,0 +1,385 @@ +/* mz_strm_buf.c -- Stream for buffering reads/writes + part of the minizip-ng project + + This version of ioapi is designed to buffer IO. 
+ + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_buf.h" + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_buffered_vtbl = { + mz_stream_buffered_open, + mz_stream_buffered_is_open, + mz_stream_buffered_read, + mz_stream_buffered_write, + mz_stream_buffered_tell, + mz_stream_buffered_seek, + mz_stream_buffered_close, + mz_stream_buffered_error, + mz_stream_buffered_create, + mz_stream_buffered_delete, + NULL, + NULL +}; + +/***************************************************************************/ + +typedef struct mz_stream_buffered_s { + mz_stream stream; + int32_t error; + char readbuf[INT16_MAX]; + int32_t readbuf_len; + int32_t readbuf_pos; + int32_t readbuf_hits; + int32_t readbuf_misses; + char writebuf[INT16_MAX]; + int32_t writebuf_len; + int32_t writebuf_pos; + int32_t writebuf_hits; + int32_t writebuf_misses; + int64_t position; +} mz_stream_buffered; + +/***************************************************************************/ + +#if 0 +# define mz_stream_buffered_print printf +#else +# define mz_stream_buffered_print(fmt,...) +#endif + +/***************************************************************************/ + +static int32_t mz_stream_buffered_reset(void *stream) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + + buffered->readbuf_len = 0; + buffered->readbuf_pos = 0; + buffered->writebuf_len = 0; + buffered->writebuf_pos = 0; + buffered->position = 0; + + return MZ_OK; +} + +int32_t mz_stream_buffered_open(void *stream, const char *path, int32_t mode) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + mz_stream_buffered_print("Buffered - Open (mode %" PRId32 ")\n", mode); + mz_stream_buffered_reset(buffered); + return mz_stream_open(buffered->stream.base, path, mode); +} + +int32_t mz_stream_buffered_is_open(void *stream) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + return mz_stream_is_open(buffered->stream.base); +} + +static int32_t mz_stream_buffered_flush(void *stream, int32_t *written) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + int32_t total_bytes_written = 0; + int32_t bytes_to_write = buffered->writebuf_len; + int32_t bytes_left_to_write = buffered->writebuf_len; + int32_t bytes_written = 0; + + *written = 0; + + while (bytes_left_to_write > 0) { + bytes_written = mz_stream_write(buffered->stream.base, + buffered->writebuf + (bytes_to_write - bytes_left_to_write), bytes_left_to_write); + + if (bytes_written != bytes_left_to_write) + return MZ_WRITE_ERROR; + + buffered->writebuf_misses += 1; + + mz_stream_buffered_print("Buffered - Write flush (%" PRId32 ":%" PRId32 " len %" PRId32 ")\n", + bytes_to_write, bytes_left_to_write, buffered->writebuf_len); + + total_bytes_written += bytes_written; + bytes_left_to_write -= bytes_written; + buffered->position += bytes_written; + } + + buffered->writebuf_len = 0; + buffered->writebuf_pos = 0; + + *written = total_bytes_written; + return MZ_OK; +} + +int32_t mz_stream_buffered_read(void *stream, void *buf, int32_t size) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + int32_t buf_len = 0; + int32_t bytes_to_read = 0; + int32_t bytes_to_copy = 0; + int32_t bytes_left_to_read = size; + int32_t bytes_read = 0; + 
int32_t bytes_flushed = 0; + + mz_stream_buffered_print("Buffered - Read (size %" PRId32 " pos %" PRId64 ")\n", size, buffered->position); + + if (buffered->writebuf_len > 0) { + int64_t position = buffered->position + buffered->writebuf_pos + + mz_stream_buffered_print("Buffered - Switch from write to read, flushing (pos %" PRId64 ")\n", position); + + mz_stream_buffered_flush(stream, &bytes_flushed); + mz_stream_buffered_seek(stream, position, MZ_SEEK_SET); + } + + while (bytes_left_to_read > 0) { + if ((buffered->readbuf_len == 0) || (buffered->readbuf_pos == buffered->readbuf_len)) { + if (buffered->readbuf_len == sizeof(buffered->readbuf)) { + buffered->readbuf_pos = 0; + buffered->readbuf_len = 0; + } + + bytes_to_read = (int32_t)sizeof(buffered->readbuf) - (buffered->readbuf_len - buffered->readbuf_pos); + bytes_read = mz_stream_read(buffered->stream.base, buffered->readbuf + buffered->readbuf_pos, bytes_to_read); + if (bytes_read < 0) + return bytes_read; + + buffered->readbuf_misses += 1; + buffered->readbuf_len += bytes_read; + buffered->position += bytes_read; + + mz_stream_buffered_print("Buffered - Filled (read %" PRId32 "/%" PRId32 " buf %" PRId32 ":%" PRId32 " pos %" PRId64 ")\n", + bytes_read, bytes_to_read, buffered->readbuf_pos, buffered->readbuf_len, buffered->position); + + if (bytes_read == 0) + break; + } + + if ((buffered->readbuf_len - buffered->readbuf_pos) > 0) { + bytes_to_copy = buffered->readbuf_len - buffered->readbuf_pos; + if (bytes_to_copy > bytes_left_to_read) + bytes_to_copy = bytes_left_to_read; + + memcpy((char *)buf + buf_len, buffered->readbuf + buffered->readbuf_pos, bytes_to_copy); + + buf_len += bytes_to_copy; + bytes_left_to_read -= bytes_to_copy; + + buffered->readbuf_hits += 1; + buffered->readbuf_pos += bytes_to_copy; + + mz_stream_buffered_print("Buffered - Emptied (copied %" PRId32 " remaining %" PRId32 " buf %" PRId32 ":%" PRId32 " pos %" PRId64 ")\n", + bytes_to_copy, bytes_left_to_read, buffered->readbuf_pos, buffered->readbuf_len, buffered->position); + } + } + + return size - bytes_left_to_read; +} + +int32_t mz_stream_buffered_write(void *stream, const void *buf, int32_t size) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + int32_t bytes_to_write = size; + int32_t bytes_left_to_write = size; + int32_t bytes_to_copy = 0; + int32_t bytes_used = 0; + int32_t bytes_flushed = 0; + int32_t err = MZ_OK; + + + mz_stream_buffered_print("Buffered - Write (size %" PRId32 " len %" PRId32 " pos %" PRId64 ")\n", + size, buffered->writebuf_len, buffered->position); + + if (buffered->readbuf_len > 0) { + buffered->position -= buffered->readbuf_len; + buffered->position += buffered->readbuf_pos; + + buffered->readbuf_len = 0; + buffered->readbuf_pos = 0; + + mz_stream_buffered_print("Buffered - Switch from read to write (pos %" PRId64 ")\n", buffered->position); + + err = mz_stream_seek(buffered->stream.base, buffered->position, MZ_SEEK_SET); + if (err != MZ_OK) + return err; + } + + while (bytes_left_to_write > 0) { + bytes_used = buffered->writebuf_len; + if (bytes_used > buffered->writebuf_pos) + bytes_used = buffered->writebuf_pos; + bytes_to_copy = (int32_t)sizeof(buffered->writebuf) - bytes_used; + if (bytes_to_copy > bytes_left_to_write) + bytes_to_copy = bytes_left_to_write; + + if (bytes_to_copy == 0) { + err = mz_stream_buffered_flush(stream, &bytes_flushed); + if (err != MZ_OK) + return err; + if (bytes_flushed == 0) + return 0; + + continue; + } + + memcpy(buffered->writebuf + buffered->writebuf_pos, + (const char *)buf + 
(bytes_to_write - bytes_left_to_write), bytes_to_copy); + + mz_stream_buffered_print("Buffered - Write copy (remaining %" PRId32 " write %" PRId32 ":%" PRId32 " len %" PRId32 ")\n", + bytes_to_copy, bytes_to_write, bytes_left_to_write, buffered->writebuf_len); + + bytes_left_to_write -= bytes_to_copy; + + buffered->writebuf_pos += bytes_to_copy; + buffered->writebuf_hits += 1; + if (buffered->writebuf_pos > buffered->writebuf_len) + buffered->writebuf_len += buffered->writebuf_pos - buffered->writebuf_len; + } + + return size - bytes_left_to_write; +} + +int64_t mz_stream_buffered_tell(void *stream) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + int64_t position = mz_stream_tell(buffered->stream.base); + + buffered->position = position; + + mz_stream_buffered_print("Buffered - Tell (pos %" PRId64 " readpos %" PRId32 " writepos %" PRId32 ")\n", + buffered->position, buffered->readbuf_pos, buffered->writebuf_pos); + + if (buffered->readbuf_len > 0) + position -= ((int64_t)buffered->readbuf_len - buffered->readbuf_pos); + if (buffered->writebuf_len > 0) + position += buffered->writebuf_pos; + return position; +} + +int32_t mz_stream_buffered_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + int32_t bytes_flushed = 0; + int32_t err = MZ_OK; + + mz_stream_buffered_print("Buffered - Seek (origin %" PRId32 " offset %" PRId64 " pos %" PRId64 ")\n", + origin, offset, buffered->position); + + switch (origin) { + case MZ_SEEK_SET: + + if ((buffered->readbuf_len > 0) && (offset < buffered->position) && + (offset >= buffered->position - buffered->readbuf_len)) { + buffered->readbuf_pos = (int32_t)(offset - (buffered->position - buffered->readbuf_len)); + return MZ_OK; + } + if (buffered->writebuf_len > 0) { + if ((offset >= buffered->position) && (offset <= buffered->position + buffered->writebuf_len)) { + buffered->writebuf_pos = (int32_t)(offset - buffered->position); + return MZ_OK; + } + } + + err = mz_stream_buffered_flush(stream, &bytes_flushed); + if (err != MZ_OK) + return err; + + buffered->position = offset; + break; + + case MZ_SEEK_CUR: + + if (buffered->readbuf_len > 0) { + if (offset <= ((int64_t)buffered->readbuf_len - buffered->readbuf_pos)) { + buffered->readbuf_pos += (uint32_t)offset; + return MZ_OK; + } + offset -= ((int64_t)buffered->readbuf_len - buffered->readbuf_pos); + buffered->position += offset; + } + if (buffered->writebuf_len > 0) { + if (offset <= ((int64_t)buffered->writebuf_len - buffered->writebuf_pos)) { + buffered->writebuf_pos += (uint32_t)offset; + return MZ_OK; + } + /* offset -= (buffered->writebuf_len - buffered->writebuf_pos); */ + } + + err = mz_stream_buffered_flush(stream, &bytes_flushed); + if (err != MZ_OK) + return err; + + break; + + case MZ_SEEK_END: + + if (buffered->writebuf_len > 0) { + buffered->writebuf_pos = buffered->writebuf_len; + return MZ_OK; + } + break; + } + + buffered->readbuf_len = 0; + buffered->readbuf_pos = 0; + buffered->writebuf_len = 0; + buffered->writebuf_pos = 0; + + return mz_stream_seek(buffered->stream.base, offset, origin); +} + +int32_t mz_stream_buffered_close(void *stream) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + int32_t bytes_flushed = 0; + + mz_stream_buffered_flush(stream, &bytes_flushed); + mz_stream_buffered_print("Buffered - Close (flushed %" PRId32 ")\n", bytes_flushed); + + if (buffered->readbuf_hits + buffered->readbuf_misses > 0) { + mz_stream_buffered_print("Buffered - Read efficiency %.02f%%\n", + 
(buffered->readbuf_hits / ((float)buffered->readbuf_hits + buffered->readbuf_misses)) * 100); + } + + if (buffered->writebuf_hits + buffered->writebuf_misses > 0) { + mz_stream_buffered_print("Buffered - Write efficiency %.02f%%\n", + (buffered->writebuf_hits / ((float)buffered->writebuf_hits + buffered->writebuf_misses)) * 100); + } + + mz_stream_buffered_reset(buffered); + + return mz_stream_close(buffered->stream.base); +} + +int32_t mz_stream_buffered_error(void *stream) { + mz_stream_buffered *buffered = (mz_stream_buffered *)stream; + return mz_stream_error(buffered->stream.base); +} + +void *mz_stream_buffered_create(void **stream) { + mz_stream_buffered *buffered = NULL; + + buffered = (mz_stream_buffered *)MZ_ALLOC(sizeof(mz_stream_buffered)); + if (buffered != NULL) { + memset(buffered, 0, sizeof(mz_stream_buffered)); + buffered->stream.vtbl = &mz_stream_buffered_vtbl; + } + if (stream != NULL) + *stream = buffered; + + return buffered; +} + +void mz_stream_buffered_delete(void **stream) { + mz_stream_buffered *buffered = NULL; + if (stream == NULL) + return; + buffered = (mz_stream_buffered *)*stream; + if (buffered != NULL) + MZ_FREE(buffered); + *stream = NULL; +} + +void *mz_stream_buffered_get_interface(void) { + return (void *)&mz_stream_buffered_vtbl; +} diff --git a/Externals/minizip/mz_strm_buf.h b/Externals/minizip/mz_strm_buf.h new file mode 100644 index 000000000000..b71e6e4e9200 --- /dev/null +++ b/Externals/minizip/mz_strm_buf.h @@ -0,0 +1,42 @@ +/* mz_strm_buf.h -- Stream for buffering reads/writes + part of the minizip-ng project + + This version of ioapi is designed to buffer IO. + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_STREAM_BUFFERED_H +#define MZ_STREAM_BUFFERED_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_buffered_open(void *stream, const char *path, int32_t mode); +int32_t mz_stream_buffered_is_open(void *stream); +int32_t mz_stream_buffered_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_buffered_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_buffered_tell(void *stream); +int32_t mz_stream_buffered_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_buffered_close(void *stream); +int32_t mz_stream_buffered_error(void *stream); + +void* mz_stream_buffered_create(void **stream); +void mz_stream_buffered_delete(void **stream); + +void* mz_stream_buffered_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_bzip.c b/Externals/minizip/mz_strm_bzip.c new file mode 100644 index 000000000000..31d8bf9a5e65 --- /dev/null +++ b/Externals/minizip/mz_strm_bzip.c @@ -0,0 +1,374 @@ +/* mz_strm_bzip.c -- Stream for bzip inflate/deflate + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_bzip.h" + +#include "bzlib.h" + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_bzip_vtbl = { + mz_stream_bzip_open, + mz_stream_bzip_is_open, + mz_stream_bzip_read, + mz_stream_bzip_write, + mz_stream_bzip_tell, + mz_stream_bzip_seek, + mz_stream_bzip_close, + mz_stream_bzip_error, + mz_stream_bzip_create, + mz_stream_bzip_delete, + mz_stream_bzip_get_prop_int64, + mz_stream_bzip_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_bzip_s { + mz_stream stream; + bz_stream bzstream; + int32_t mode; + int32_t error; + uint8_t buffer[INT16_MAX]; + int32_t buffer_len; + int16_t stream_end; + int64_t total_in; + int64_t total_out; + int64_t max_total_in; + int8_t initialized; + int16_t level; +} mz_stream_bzip; + +/***************************************************************************/ + +int32_t mz_stream_bzip_open(void *stream, const char *path, int32_t mode) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + + MZ_UNUSED(path); + + bzip->bzstream.bzalloc = 0; + bzip->bzstream.bzfree = 0; + bzip->bzstream.opaque = 0; + bzip->bzstream.total_in_lo32 = 0; + bzip->bzstream.total_in_hi32 = 0; + bzip->bzstream.total_out_lo32 = 0; + bzip->bzstream.total_out_hi32 = 0; + + bzip->total_in = 0; + bzip->total_out = 0; + + if (mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + bzip->bzstream.next_out = (char *)bzip->buffer; + bzip->bzstream.avail_out = sizeof(bzip->buffer); + + bzip->error = BZ2_bzCompressInit(&bzip->bzstream, bzip->level, 0, 0); +#endif + } else if (mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + bzip->bzstream.next_in = (char *)bzip->buffer; + bzip->bzstream.avail_in = 0; + + bzip->error = BZ2_bzDecompressInit(&bzip->bzstream, 0, 0); +#endif + } + + if (bzip->error != BZ_OK) + return MZ_OPEN_ERROR; + + bzip->initialized = 1; + bzip->stream_end = 0; + bzip->mode = mode; + return MZ_OK; +} + +int32_t mz_stream_bzip_is_open(void *stream) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + if (bzip->initialized != 1) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_bzip_read(void *stream, void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + uint64_t total_in_before = 0; + uint64_t total_out_before = 0; + uint64_t total_in_after = 0; + uint64_t total_out_after = 0; + int32_t total_in = 0; + int32_t total_out = 0; + int32_t in_bytes = 0; + int32_t out_bytes = 0; + int32_t bytes_to_read = sizeof(bzip->buffer); + int32_t read = 0; + int32_t err = BZ_OK; + + + if (bzip->stream_end) + return 0; + + bzip->bzstream.next_out = (char *)buf; + bzip->bzstream.avail_out = (unsigned int)size; + + do { + if (bzip->bzstream.avail_in == 0) { + if (bzip->max_total_in > 0) { + if ((int64_t)bytes_to_read > (bzip->max_total_in - bzip->total_in)) + bytes_to_read = (int32_t)(bzip->max_total_in - bzip->total_in); + } + + read = mz_stream_read(bzip->stream.base, bzip->buffer, bytes_to_read); + + if (read < 0) + return read; + + bzip->bzstream.next_in = (char *)bzip->buffer; + bzip->bzstream.avail_in = (uint32_t)read; + } + + total_in_before = bzip->bzstream.avail_in; + total_out_before = bzip->bzstream.total_out_lo32 + + 
(((uint64_t)bzip->bzstream.total_out_hi32) << 32); + + err = BZ2_bzDecompress(&bzip->bzstream); + + total_in_after = bzip->bzstream.avail_in; + total_out_after = bzip->bzstream.total_out_lo32 + + (((uint64_t)bzip->bzstream.total_out_hi32) << 32); + + in_bytes = (int32_t)(total_in_before - total_in_after); + out_bytes = (int32_t)(total_out_after - total_out_before); + + total_in += in_bytes; + total_out += out_bytes; + + bzip->total_in += in_bytes; + bzip->total_out += out_bytes; + + if (err == BZ_STREAM_END) { + bzip->stream_end = 1; + break; + } + if (err != BZ_OK && err != BZ_RUN_OK) { + bzip->error = err; + break; + } + } while (bzip->bzstream.avail_out > 0); + + if (bzip->error != 0) + return MZ_DATA_ERROR; + + return total_out; +#endif +} + +#ifndef MZ_ZIP_NO_COMPRESSION +static int32_t mz_stream_bzip_flush(void *stream) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + if (mz_stream_write(bzip->stream.base, bzip->buffer, bzip->buffer_len) != bzip->buffer_len) + return MZ_WRITE_ERROR; + return MZ_OK; +} + +static int32_t mz_stream_bzip_compress(void *stream, int flush) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + uint32_t out_bytes = 0; + int32_t err = BZ_OK; + + do { + if (bzip->bzstream.avail_out == 0) { + err = mz_stream_bzip_flush(bzip); + if (err != MZ_OK) + return err; + + bzip->bzstream.avail_out = sizeof(bzip->buffer); + bzip->bzstream.next_out = (char *)bzip->buffer; + + bzip->buffer_len = 0; + } + + total_out_before = bzip->bzstream.total_out_lo32 + + (((uint64_t)bzip->bzstream.total_out_hi32) << 32); + + err = BZ2_bzCompress(&bzip->bzstream, flush); + + total_out_after = bzip->bzstream.total_out_lo32 + + (((uint64_t)bzip->bzstream.total_out_hi32) << 32); + + out_bytes = (uint32_t)(total_out_after - total_out_before); + + bzip->buffer_len += out_bytes; + bzip->total_out += out_bytes; + + if (err == BZ_STREAM_END) + break; + if (err < 0) { + bzip->error = err; + return MZ_DATA_ERROR; + } + } while ((bzip->bzstream.avail_in > 0) || (flush == BZ_FINISH && err == BZ_FINISH_OK)); + + return MZ_OK; +} +#endif + +int32_t mz_stream_bzip_write(void *stream, const void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_COMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + int32_t err = MZ_OK; + + bzip->bzstream.next_in = (char *)(intptr_t)buf; + bzip->bzstream.avail_in = (unsigned int)size; + + err = mz_stream_bzip_compress(stream, BZ_RUN); + if (err != MZ_OK) { + return err; + } + + bzip->total_in += size; + return size; +#endif +} + +int64_t mz_stream_bzip_tell(void *stream) { + MZ_UNUSED(stream); + + return MZ_TELL_ERROR; +} + +int32_t mz_stream_bzip_seek(void *stream, int64_t offset, int32_t origin) { + MZ_UNUSED(stream); + MZ_UNUSED(offset); + MZ_UNUSED(origin); + + return MZ_SEEK_ERROR; +} + +int32_t mz_stream_bzip_close(void *stream) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + + if (bzip->mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + mz_stream_bzip_compress(stream, BZ_FINISH); + mz_stream_bzip_flush(stream); + + BZ2_bzCompressEnd(&bzip->bzstream); +#endif + } else if (bzip->mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + BZ2_bzDecompressEnd(&bzip->bzstream); +#endif + } + + bzip->initialized = 0; + + if (bzip->error != BZ_OK) + return MZ_CLOSE_ERROR; + return MZ_OK; +} + +int32_t 
mz_stream_bzip_error(void *stream) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + return bzip->error; +} + +int32_t mz_stream_bzip_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = bzip->total_in; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = bzip->max_total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = bzip->total_out; + break; + case MZ_STREAM_PROP_HEADER_SIZE: + *value = 0; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_bzip_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_bzip *bzip = (mz_stream_bzip *)stream; + switch (prop) { + case MZ_STREAM_PROP_COMPRESS_LEVEL: + if (value < 0) + bzip->level = 6; + else + bzip->level = (int16_t)value; + return MZ_OK; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + bzip->max_total_in = value; + return MZ_OK; + } + return MZ_EXIST_ERROR; +} + +void *mz_stream_bzip_create(void **stream) { + mz_stream_bzip *bzip = NULL; + + bzip = (mz_stream_bzip *)MZ_ALLOC(sizeof(mz_stream_bzip)); + if (bzip != NULL) { + memset(bzip, 0, sizeof(mz_stream_bzip)); + bzip->stream.vtbl = &mz_stream_bzip_vtbl; + bzip->level = 6; + } + if (stream != NULL) + *stream = bzip; + + return bzip; +} + +void mz_stream_bzip_delete(void **stream) { + mz_stream_bzip *bzip = NULL; + if (stream == NULL) + return; + bzip = (mz_stream_bzip *)*stream; + if (bzip != NULL) + MZ_FREE(bzip); + *stream = NULL; +} + +void *mz_stream_bzip_get_interface(void) { + return (void *)&mz_stream_bzip_vtbl; +} + +extern void bz_internal_error(int errcode) { + MZ_UNUSED(errcode); +} diff --git a/Externals/minizip/mz_strm_bzip.h b/Externals/minizip/mz_strm_bzip.h new file mode 100644 index 000000000000..71b2b380baa7 --- /dev/null +++ b/Externals/minizip/mz_strm_bzip.h @@ -0,0 +1,45 @@ +/* mz_strm_bzip.h -- Stream for bzip inflate/deflate + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_STREAM_BZIP_H +#define MZ_STREAM_BZIP_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_bzip_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_bzip_is_open(void *stream); +int32_t mz_stream_bzip_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_bzip_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_bzip_tell(void *stream); +int32_t mz_stream_bzip_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_bzip_close(void *stream); +int32_t mz_stream_bzip_error(void *stream); + +int32_t mz_stream_bzip_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_bzip_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_bzip_create(void **stream); +void mz_stream_bzip_delete(void **stream); + +void* mz_stream_bzip_get_interface(void); + +void bz_internal_error(int errcode); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_libcomp.c b/Externals/minizip/mz_strm_libcomp.c new file mode 100644 index 000000000000..27eb93345384 --- /dev/null +++ b/Externals/minizip/mz_strm_libcomp.c @@ -0,0 +1,356 @@ +/* mz_strm_libcomp.c -- Stream for apple compression + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_libcomp.h" + +#include + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_libcomp_vtbl = { + mz_stream_libcomp_open, + mz_stream_libcomp_is_open, + mz_stream_libcomp_read, + mz_stream_libcomp_write, + mz_stream_libcomp_tell, + mz_stream_libcomp_seek, + mz_stream_libcomp_close, + mz_stream_libcomp_error, + mz_stream_libcomp_create, + mz_stream_libcomp_delete, + mz_stream_libcomp_get_prop_int64, + mz_stream_libcomp_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_libcomp_s { + mz_stream stream; + compression_stream + cstream; + uint8_t buffer[INT16_MAX]; + int32_t buffer_len; + int64_t total_in; + int64_t total_out; + int64_t max_total_in; + int8_t initialized; + int32_t mode; + int32_t error; + int16_t method; +} mz_stream_libcomp; + +/***************************************************************************/ + +int32_t mz_stream_libcomp_open(void *stream, const char *path, int32_t mode) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + int32_t err = 0; + int16_t operation = 0; + compression_algorithm algorithm = 0; + + MZ_UNUSED(path); + + if (libcomp->method == 0) + return MZ_PARAM_ERROR; + + libcomp->total_in = 0; + libcomp->total_out = 0; + + if (mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + operation = COMPRESSION_STREAM_ENCODE; +#endif + } else if (mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + operation = COMPRESSION_STREAM_DECODE; +#endif + } + + if (libcomp->method == MZ_COMPRESS_METHOD_DEFLATE) + algorithm = COMPRESSION_ZLIB; + else if (libcomp->method == MZ_COMPRESS_METHOD_XZ) + algorithm = COMPRESSION_LZMA; + else + return 
MZ_SUPPORT_ERROR; + + err = compression_stream_init(&libcomp->cstream, (compression_stream_operation)operation, algorithm); + + if (err == COMPRESSION_STATUS_ERROR) { + libcomp->error = err; + return MZ_OPEN_ERROR; + } + + libcomp->initialized = 1; + libcomp->mode = mode; + return MZ_OK; +} + +int32_t mz_stream_libcomp_is_open(void *stream) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + if (libcomp->initialized != 1) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_libcomp_read(void *stream, void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + uint64_t total_in_before = 0; + uint64_t total_in_after = 0; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + int32_t total_in = 0; + int32_t total_out = 0; + int32_t in_bytes = 0; + int32_t out_bytes = 0; + int32_t bytes_to_read = sizeof(libcomp->buffer); + int32_t read = 0; + int32_t err = MZ_OK; + int16_t flags = 0; + + libcomp->cstream.dst_ptr = buf; + libcomp->cstream.dst_size = (size_t)size; + + do { + if (libcomp->cstream.src_size == 0) { + if (libcomp->max_total_in > 0) { + if ((int64_t)bytes_to_read > (libcomp->max_total_in - libcomp->total_in)) + bytes_to_read = (int32_t)(libcomp->max_total_in - libcomp->total_in); + } + + read = mz_stream_read(libcomp->stream.base, libcomp->buffer, bytes_to_read); + + if (read < 0) + return read; + if (read == 0) + flags = COMPRESSION_STREAM_FINALIZE; + + libcomp->cstream.src_ptr = libcomp->buffer; + libcomp->cstream.src_size = (size_t)read; + } + + total_in_before = libcomp->cstream.src_size; + total_out_before = libcomp->cstream.dst_size; + + err = compression_stream_process(&libcomp->cstream, flags); + if (err == COMPRESSION_STATUS_ERROR) { + libcomp->error = err; + break; + } + + total_in_after = libcomp->cstream.src_size; + total_out_after = libcomp->cstream.dst_size; + + in_bytes = (int32_t)(total_in_before - total_in_after); + out_bytes = (int32_t)(total_out_before - total_out_after); + + total_in += in_bytes; + total_out += out_bytes; + + libcomp->total_in += in_bytes; + libcomp->total_out += out_bytes; + + if (err == COMPRESSION_STATUS_END) + break; + if (err != COMPRESSION_STATUS_OK) { + libcomp->error = err; + break; + } + } while (libcomp->cstream.dst_size > 0); + + if (libcomp->error != 0) + return MZ_DATA_ERROR; + + return total_out; +#endif +} + +static int32_t mz_stream_libcomp_flush(void *stream) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + if (mz_stream_write(libcomp->stream.base, libcomp->buffer, libcomp->buffer_len) != libcomp->buffer_len) + return MZ_WRITE_ERROR; + return MZ_OK; +} + +static int32_t mz_stream_libcomp_deflate(void *stream, int flush) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + uint32_t out_bytes = 0; + int32_t err = MZ_OK; + + + do { + if (libcomp->cstream.dst_size == 0) { + err = mz_stream_libcomp_flush(libcomp); + if (err != MZ_OK) { + libcomp->error = err; + return err; + } + + libcomp->cstream.dst_size = sizeof(libcomp->buffer); + libcomp->cstream.dst_ptr = libcomp->buffer; + + libcomp->buffer_len = 0; + } + + total_out_before = libcomp->cstream.dst_size; + err = compression_stream_process(&libcomp->cstream, flush); + total_out_after = libcomp->cstream.dst_size; + + out_bytes = (uint32_t)(total_out_before - total_out_after); + + libcomp->buffer_len += 
out_bytes; + libcomp->total_out += out_bytes; + + if (err == COMPRESSION_STATUS_END) + break; + if (err != COMPRESSION_STATUS_OK) { + libcomp->error = err; + return MZ_DATA_ERROR; + } + } while ((libcomp->cstream.src_size > 0) || (flush == COMPRESSION_STREAM_FINALIZE && err == COMPRESSION_STATUS_OK)); + + return MZ_OK; +} + +int32_t mz_stream_libcomp_write(void *stream, const void *buf, int32_t size) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + int32_t err = size; + +#ifdef MZ_ZIP_NO_COMPRESSION + MZ_UNUSED(libcomp); + err = MZ_SUPPORT_ERROR; +#else + libcomp->cstream.src_ptr = buf; + libcomp->cstream.src_size = (size_t)size; + + mz_stream_libcomp_deflate(stream, 0); + + libcomp->total_in += size; +#endif + return err; +} + +int64_t mz_stream_libcomp_tell(void *stream) { + MZ_UNUSED(stream); + + return MZ_TELL_ERROR; +} + +int32_t mz_stream_libcomp_seek(void *stream, int64_t offset, int32_t origin) { + MZ_UNUSED(stream); + MZ_UNUSED(offset); + MZ_UNUSED(origin); + + return MZ_SEEK_ERROR; +} + +int32_t mz_stream_libcomp_close(void *stream) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + + + if (libcomp->mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + mz_stream_libcomp_deflate(stream, COMPRESSION_STREAM_FINALIZE); + mz_stream_libcomp_flush(stream); +#endif + } else if (libcomp->mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#endif + } + + compression_stream_destroy(&libcomp->cstream); + + libcomp->initialized = 0; + + if (libcomp->error != MZ_OK) + return MZ_CLOSE_ERROR; + return MZ_OK; +} + +int32_t mz_stream_libcomp_error(void *stream) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + return libcomp->error; +} + +int32_t mz_stream_libcomp_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = libcomp->total_in; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = libcomp->max_total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = libcomp->total_out; + break; + case MZ_STREAM_PROP_HEADER_SIZE: + *value = 0; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_libcomp_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_libcomp *libcomp = (mz_stream_libcomp *)stream; + switch (prop) { + case MZ_STREAM_PROP_COMPRESS_METHOD: + libcomp->method = (int16_t)value; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + libcomp->max_total_in = value; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +void *mz_stream_libcomp_create(void **stream) { + mz_stream_libcomp *libcomp = NULL; + + libcomp = (mz_stream_libcomp *)MZ_ALLOC(sizeof(mz_stream_libcomp)); + if (libcomp != NULL) { + memset(libcomp, 0, sizeof(mz_stream_libcomp)); + libcomp->stream.vtbl = &mz_stream_libcomp_vtbl; + } + if (stream != NULL) + *stream = libcomp; + + return libcomp; +} + +void mz_stream_libcomp_delete(void **stream) { + mz_stream_libcomp *libcomp = NULL; + if (stream == NULL) + return; + libcomp = (mz_stream_libcomp *)*stream; + if (libcomp != NULL) + MZ_FREE(libcomp); + *stream = NULL; +} diff --git a/Externals/minizip/mz_strm_libcomp.h b/Externals/minizip/mz_strm_libcomp.h new file mode 100644 index 000000000000..5c3fee8df94f --- /dev/null +++ b/Externals/minizip/mz_strm_libcomp.h @@ -0,0 +1,45 @@ +/* mz_strm_libcomp.h -- Stream for apple compression + part of the minizip-ng project + + 
Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_STREAM_LIBCOMP_H +#define MZ_STREAM_LIBCOMP_H + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_libcomp_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_libcomp_is_open(void *stream); +int32_t mz_stream_libcomp_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_libcomp_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_libcomp_tell(void *stream); +int32_t mz_stream_libcomp_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_libcomp_close(void *stream); +int32_t mz_stream_libcomp_error(void *stream); + +int32_t mz_stream_libcomp_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_libcomp_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_libcomp_create(void **stream); +void mz_stream_libcomp_delete(void **stream); + +void* mz_stream_libcomp_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_lzma.c b/Externals/minizip/mz_strm_lzma.c new file mode 100644 index 000000000000..22fb972ef3bd --- /dev/null +++ b/Externals/minizip/mz_strm_lzma.c @@ -0,0 +1,470 @@ +/* mz_strm_lzma.c -- Stream for lzma inflate/deflate + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_lzma.h" + +#include "lzma.h" + +/***************************************************************************/ + +#define MZ_LZMA_MAGIC_SIZE (4) +#define MZ_LZMA_ZIP_HEADER_SIZE (5) +#define MZ_LZMA_ALONE_HEADER_SIZE (MZ_LZMA_ZIP_HEADER_SIZE + 8) + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_lzma_vtbl = { + mz_stream_lzma_open, + mz_stream_lzma_is_open, + mz_stream_lzma_read, + mz_stream_lzma_write, + mz_stream_lzma_tell, + mz_stream_lzma_seek, + mz_stream_lzma_close, + mz_stream_lzma_error, + mz_stream_lzma_create, + mz_stream_lzma_delete, + mz_stream_lzma_get_prop_int64, + mz_stream_lzma_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_lzma_s { + mz_stream stream; + lzma_stream lstream; + int32_t mode; + int32_t error; + uint8_t buffer[INT16_MAX]; + int32_t buffer_len; + int64_t total_in; + int64_t total_out; + int64_t max_total_in; + int64_t max_total_out; + int8_t initialized; + int8_t header; + int32_t header_size; + uint32_t preset; + int16_t method; +} mz_stream_lzma; + +/***************************************************************************/ + +int32_t mz_stream_lzma_open(void *stream, const char *path, int32_t mode) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + lzma_filter filters[LZMA_FILTERS_MAX + 1]; + lzma_options_lzma opt_lzma; + uint32_t size = 0; + uint8_t major = 0; + uint8_t minor = 0; + + MZ_UNUSED(path); + + memset(&opt_lzma, 0, sizeof(opt_lzma)); + + lzma->lstream.total_in = 0; + lzma->lstream.total_out = 0; + + lzma->total_in = 0; + lzma->total_out = 0; + lzma->header = 0; + + if (mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + MZ_UNUSED(filters); + MZ_UNUSED(major); + MZ_UNUSED(minor); + return MZ_SUPPORT_ERROR; +#else + lzma->lstream.next_out = lzma->buffer; + lzma->lstream.avail_out = sizeof(lzma->buffer); + + if (lzma_lzma_preset(&opt_lzma, lzma->preset)) + return MZ_OPEN_ERROR; + + memset(&filters, 0, sizeof(filters)); + + if (lzma->method == MZ_COMPRESS_METHOD_LZMA) + filters[0].id = LZMA_FILTER_LZMA1; + else if (lzma->method == MZ_COMPRESS_METHOD_XZ) + filters[0].id = LZMA_FILTER_LZMA2; + + filters[0].options = &opt_lzma; + filters[1].id = LZMA_VLI_UNKNOWN; + + lzma_properties_size(&size, (lzma_filter *)&filters); + + if (lzma->method == MZ_COMPRESS_METHOD_LZMA) { + mz_stream_write_uint8(lzma->stream.base, LZMA_VERSION_MAJOR); + mz_stream_write_uint8(lzma->stream.base, LZMA_VERSION_MINOR); + mz_stream_write_uint16(lzma->stream.base, (uint16_t)size); + + lzma->header = 1; + lzma->total_out += MZ_LZMA_MAGIC_SIZE; + + lzma->error = lzma_alone_encoder(&lzma->lstream, &opt_lzma); + } else if (lzma->method == MZ_COMPRESS_METHOD_XZ) + lzma->error = lzma_stream_encoder(&lzma->lstream, filters, LZMA_CHECK_CRC64); +#endif + } else if (mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + MZ_UNUSED(filters); + MZ_UNUSED(major); + MZ_UNUSED(minor); + return MZ_SUPPORT_ERROR; +#else + lzma->lstream.next_in = lzma->buffer; + lzma->lstream.avail_in = 0; + + if (lzma->method == MZ_COMPRESS_METHOD_LZMA) { + mz_stream_read_uint8(lzma->stream.base, &major); + mz_stream_read_uint8(lzma->stream.base, &minor); + mz_stream_read_uint16(lzma->stream.base, (uint16_t *)&size); + + lzma->header = 1; + lzma->total_in += MZ_LZMA_MAGIC_SIZE; + + lzma->error = lzma_alone_decoder(&lzma->lstream, UINT64_MAX); + } else if (lzma->method == 
MZ_COMPRESS_METHOD_XZ) + lzma->error = lzma_stream_decoder(&lzma->lstream, UINT64_MAX, 0); +#endif + } + + if (lzma->error != LZMA_OK) + return MZ_OPEN_ERROR; + + lzma->initialized = 1; + lzma->mode = mode; + return MZ_OK; +} + +int32_t mz_stream_lzma_is_open(void *stream) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + if (lzma->initialized != 1) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_lzma_read(void *stream, void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + uint64_t total_in_before = 0; + uint64_t total_out_before = 0; + uint64_t total_in_after = 0; + uint64_t total_out_after = 0; + int32_t total_in = 0; + int32_t total_out = 0; + int32_t in_bytes = 0; + int32_t out_bytes = 0; + int32_t bytes_to_read = sizeof(lzma->buffer); + int32_t read = 0; + int32_t err = LZMA_OK; + + + lzma->lstream.next_out = (uint8_t*)buf; + lzma->lstream.avail_out = (size_t)size; + + do { + if (lzma->lstream.avail_in == 0) { + if (lzma->max_total_in > 0) { + if ((int64_t)bytes_to_read > (lzma->max_total_in - lzma->total_in)) + bytes_to_read = (int32_t)(lzma->max_total_in - lzma->total_in); + } + + if (lzma->header) { + bytes_to_read = MZ_LZMA_ZIP_HEADER_SIZE - lzma->header_size; + } + + read = mz_stream_read(lzma->stream.base, lzma->buffer, bytes_to_read); + + if (read < 0) + return read; + + /* Write uncompressed size for lzma alone header not in zip format */ + if (lzma->header) { + lzma->header_size += read; + + if (lzma->header_size == MZ_LZMA_ZIP_HEADER_SIZE) { + uint64_t uncompressed_size = UINT64_MAX; + + memcpy(lzma->buffer + MZ_LZMA_ZIP_HEADER_SIZE, &uncompressed_size, sizeof(uncompressed_size)); + + read += sizeof(uncompressed_size); + bytes_to_read = sizeof(lzma->buffer); + + lzma->total_in -= sizeof(uncompressed_size); + lzma->header = 0; + } + } + + lzma->lstream.next_in = lzma->buffer; + lzma->lstream.avail_in = (size_t)read; + } + + total_in_before = lzma->lstream.avail_in; + total_out_before = lzma->lstream.total_out; + + err = lzma_code(&lzma->lstream, LZMA_RUN); + + total_in_after = lzma->lstream.avail_in; + total_out_after = lzma->lstream.total_out; + if ((lzma->max_total_out != -1) && (int64_t)total_out_after > lzma->max_total_out) + total_out_after = (uint64_t)lzma->max_total_out; + + in_bytes = (int32_t)(total_in_before - total_in_after); + out_bytes = (int32_t)(total_out_after - total_out_before); + + total_in += in_bytes; + total_out += out_bytes; + + lzma->total_in += in_bytes; + lzma->total_out += out_bytes; + + if (err == LZMA_STREAM_END) + break; + if (err != LZMA_OK) { + lzma->error = err; + break; + } + } while (lzma->lstream.avail_out > 0); + + if (lzma->error != 0) + return MZ_DATA_ERROR; + + return total_out; +#endif +} + +#ifndef MZ_ZIP_NO_COMPRESSION +static int32_t mz_stream_lzma_flush(void *stream) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + int32_t buffer_len = lzma->buffer_len; + uint8_t *buffer = lzma->buffer; + + /* Skip writing lzma_alone header uncompressed size for zip format */ + if (lzma->header) { + uint64_t uncompressed_size = 0; + + if (lzma->buffer_len < MZ_LZMA_ALONE_HEADER_SIZE) + return MZ_OK; + + if (mz_stream_write(lzma->stream.base, buffer, MZ_LZMA_ZIP_HEADER_SIZE) != MZ_LZMA_ZIP_HEADER_SIZE) + return MZ_WRITE_ERROR; + + buffer += MZ_LZMA_ALONE_HEADER_SIZE; + buffer_len -= MZ_LZMA_ALONE_HEADER_SIZE; + + lzma->buffer_len -= sizeof(uncompressed_size); + 
lzma->total_out -= sizeof(uncompressed_size); + lzma->header = 0; + } + + if (mz_stream_write(lzma->stream.base, buffer, buffer_len) != buffer_len) + return MZ_WRITE_ERROR; + + return MZ_OK; +} + +static int32_t mz_stream_lzma_code(void *stream, int32_t flush) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + uint32_t out_bytes = 0; + int32_t err = LZMA_OK; + + + do { + if (lzma->lstream.avail_out == 0) { + err = mz_stream_lzma_flush(lzma); + if (err != MZ_OK) + return err; + + lzma->lstream.avail_out = sizeof(lzma->buffer); + lzma->lstream.next_out = lzma->buffer; + + lzma->buffer_len = 0; + } + + total_out_before = lzma->lstream.total_out; + err = lzma_code(&lzma->lstream, (lzma_action)flush); + total_out_after = lzma->lstream.total_out; + + out_bytes = (uint32_t)(total_out_after - total_out_before); + + if (err != LZMA_OK && err != LZMA_STREAM_END) { + lzma->error = err; + return MZ_DATA_ERROR; + } + + lzma->buffer_len += out_bytes; + lzma->total_out += out_bytes; + } while ((lzma->lstream.avail_in > 0) || (flush == LZMA_FINISH && err == LZMA_OK)); + + return MZ_OK; +} +#endif + +int32_t mz_stream_lzma_write(void *stream, const void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_COMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + int32_t err = MZ_OK; + + lzma->lstream.next_in = (uint8_t*)(intptr_t)buf; + lzma->lstream.avail_in = (size_t)size; + + err = mz_stream_lzma_code(stream, LZMA_RUN); + if (err != MZ_OK) { + return err; + } + + lzma->total_in += size; + return size; +#endif +} + +int64_t mz_stream_lzma_tell(void *stream) { + MZ_UNUSED(stream); + + return MZ_TELL_ERROR; +} + +int32_t mz_stream_lzma_seek(void *stream, int64_t offset, int32_t origin) { + MZ_UNUSED(stream); + MZ_UNUSED(offset); + MZ_UNUSED(origin); + + return MZ_SEEK_ERROR; +} + +int32_t mz_stream_lzma_close(void *stream) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + + if (lzma->mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + mz_stream_lzma_code(stream, LZMA_FINISH); + mz_stream_lzma_flush(stream); + + lzma_end(&lzma->lstream); +#endif + } else if (lzma->mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + lzma_end(&lzma->lstream); +#endif + } + + lzma->initialized = 0; + + if (lzma->error != LZMA_OK) + return MZ_CLOSE_ERROR; + return MZ_OK; +} + +int32_t mz_stream_lzma_error(void *stream) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + return lzma->error; +} + +int32_t mz_stream_lzma_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = lzma->total_in; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = lzma->max_total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = lzma->total_out; + break; + case MZ_STREAM_PROP_TOTAL_OUT_MAX: + *value = lzma->max_total_out; + break; + case MZ_STREAM_PROP_HEADER_SIZE: + *value = MZ_LZMA_MAGIC_SIZE; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_lzma_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_lzma *lzma = (mz_stream_lzma *)stream; + switch (prop) { + case MZ_STREAM_PROP_COMPRESS_LEVEL: + if (value >= 9) + lzma->preset = LZMA_PRESET_EXTREME; + else + lzma->preset = LZMA_PRESET_DEFAULT; + break; + case 
MZ_STREAM_PROP_COMPRESS_METHOD: + lzma->method = (int16_t)value; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + lzma->max_total_in = value; + break; + case MZ_STREAM_PROP_TOTAL_OUT_MAX: + if (value < -1) + return MZ_PARAM_ERROR; + lzma->max_total_out = value; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +void *mz_stream_lzma_create(void **stream) { + mz_stream_lzma *lzma = NULL; + + lzma = (mz_stream_lzma *)MZ_ALLOC(sizeof(mz_stream_lzma)); + if (lzma != NULL) { + memset(lzma, 0, sizeof(mz_stream_lzma)); + lzma->stream.vtbl = &mz_stream_lzma_vtbl; + lzma->method = MZ_COMPRESS_METHOD_LZMA; + lzma->preset = LZMA_PRESET_DEFAULT; + lzma->max_total_out = -1; + } + if (stream != NULL) + *stream = lzma; + + return lzma; +} + +void mz_stream_lzma_delete(void **stream) { + mz_stream_lzma *lzma = NULL; + if (stream == NULL) + return; + lzma = (mz_stream_lzma *)*stream; + if (lzma != NULL) + MZ_FREE(lzma); + *stream = NULL; +} + +void *mz_stream_lzma_get_interface(void) { + return (void *)&mz_stream_lzma_vtbl; +} diff --git a/Externals/minizip/mz_strm_lzma.h b/Externals/minizip/mz_strm_lzma.h new file mode 100644 index 000000000000..f447baa65c9b --- /dev/null +++ b/Externals/minizip/mz_strm_lzma.h @@ -0,0 +1,43 @@ +/* mz_strm_lzma.h -- Stream for lzma inflate/deflate + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as lzma. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_STREAM_LZMA_H +#define MZ_STREAM_LZMA_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_lzma_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_lzma_is_open(void *stream); +int32_t mz_stream_lzma_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_lzma_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_lzma_tell(void *stream); +int32_t mz_stream_lzma_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_lzma_close(void *stream); +int32_t mz_stream_lzma_error(void *stream); + +int32_t mz_stream_lzma_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_lzma_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_lzma_create(void **stream); +void mz_stream_lzma_delete(void **stream); + +void* mz_stream_lzma_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_mem.c b/Externals/minizip/mz_strm_mem.c new file mode 100644 index 000000000000..f4a882d928b9 --- /dev/null +++ b/Externals/minizip/mz_strm_mem.c @@ -0,0 +1,272 @@ +/* mz_strm_mem.c -- Stream for memory access + part of the minizip-ng project + + This interface is designed to access memory rather than files. + We do use a region of memory to put data in to and take it out of. + + Based on Unzip ioapi.c version 0.22, May 19th, 2003 + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 2003 Justin Fletcher + Copyright (C) 1998-2003 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_mem.h" + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_mem_vtbl = { + mz_stream_mem_open, + mz_stream_mem_is_open, + mz_stream_mem_read, + mz_stream_mem_write, + mz_stream_mem_tell, + mz_stream_mem_seek, + mz_stream_mem_close, + mz_stream_mem_error, + mz_stream_mem_create, + mz_stream_mem_delete, + NULL, + NULL +}; + +/***************************************************************************/ + +typedef struct mz_stream_mem_s { + mz_stream stream; + int32_t mode; + uint8_t *buffer; /* Memory buffer pointer */ + int32_t size; /* Size of the memory buffer */ + int32_t limit; /* Furthest we've written */ + int32_t position; /* Current position in the memory */ + int32_t grow_size; /* Size to grow when full */ +} mz_stream_mem; + +/***************************************************************************/ + +static int32_t mz_stream_mem_set_size(void *stream, int32_t size) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + int32_t new_size = size; + uint8_t *new_buf = NULL; + + + new_buf = (uint8_t *)MZ_ALLOC((uint32_t)new_size); + if (new_buf == NULL) + return MZ_BUF_ERROR; + + if (mem->buffer) { + memcpy(new_buf, mem->buffer, mem->size); + MZ_FREE(mem->buffer); + } + + mem->buffer = new_buf; + mem->size = new_size; + return MZ_OK; +} + +int32_t mz_stream_mem_open(void *stream, const char *path, int32_t mode) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + int32_t err = MZ_OK; + + MZ_UNUSED(path); + + mem->mode = mode; + mem->limit = 0; + mem->position = 0; + + if (mem->mode & MZ_OPEN_MODE_CREATE) + err = mz_stream_mem_set_size(stream, mem->grow_size); + else + mem->limit = mem->size; + + return err; +} + +int32_t mz_stream_mem_is_open(void *stream) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + if (mem->buffer == NULL) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_mem_read(void *stream, void *buf, int32_t size) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + + if (size > mem->size - mem->position) + size = mem->size - mem->position; + if (mem->position + size > mem->limit) + size = mem->limit - mem->position; + + if (size <= 0) + return 0; + + memcpy(buf, mem->buffer + mem->position, size); + mem->position += size; + + return size; +} + +int32_t mz_stream_mem_write(void *stream, const void *buf, int32_t size) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + int32_t new_size = 0; + int32_t err = MZ_OK; + + if (size == 0) + return size; + + if (size > mem->size - mem->position) { + if (mem->mode & MZ_OPEN_MODE_CREATE) { + new_size = mem->size; + if (size < mem->grow_size) + new_size += mem->grow_size; + else + new_size += size; + + err = mz_stream_mem_set_size(stream, new_size); + if (err != MZ_OK) + return err; + } else { + size = mem->size - mem->position; + } + } + + memcpy(mem->buffer + mem->position, buf, size); + + mem->position += size; + if (mem->position > mem->limit) + mem->limit = mem->position; + + return size; +} + +int64_t mz_stream_mem_tell(void *stream) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + return mem->position; +} + +int32_t mz_stream_mem_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + int64_t new_pos = 0; + int32_t err = MZ_OK; + + switch (origin) { + case MZ_SEEK_CUR: + new_pos = mem->position + offset; + break; + case MZ_SEEK_END: + new_pos = mem->limit + offset; + break; + case MZ_SEEK_SET: + new_pos = offset; + break; + default: + 
return MZ_SEEK_ERROR; + } + + if (new_pos > mem->size) { + if ((mem->mode & MZ_OPEN_MODE_CREATE) == 0) + return MZ_SEEK_ERROR; + + err = mz_stream_mem_set_size(stream, (int32_t)new_pos); + if (err != MZ_OK) + return err; + } else if (new_pos < 0) { + return MZ_SEEK_ERROR; + } + + mem->position = (int32_t)new_pos; + return MZ_OK; +} + +int32_t mz_stream_mem_close(void *stream) { + MZ_UNUSED(stream); + + /* We never return errors */ + return MZ_OK; +} + +int32_t mz_stream_mem_error(void *stream) { + MZ_UNUSED(stream); + + /* We never return errors */ + return MZ_OK; +} + +void mz_stream_mem_set_buffer(void *stream, void *buf, int32_t size) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + mem->buffer = (uint8_t *)buf; + mem->size = size; + mem->limit = size; +} + +int32_t mz_stream_mem_get_buffer(void *stream, const void **buf) { + return mz_stream_mem_get_buffer_at(stream, 0, buf); +} + +int32_t mz_stream_mem_get_buffer_at(void *stream, int64_t position, const void **buf) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + if (buf == NULL || position < 0 || mem->size < position || mem->buffer == NULL) + return MZ_SEEK_ERROR; + *buf = mem->buffer + position; + return MZ_OK; +} + +int32_t mz_stream_mem_get_buffer_at_current(void *stream, const void **buf) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + return mz_stream_mem_get_buffer_at(stream, mem->position, buf); +} + +void mz_stream_mem_get_buffer_length(void *stream, int32_t *length) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + *length = mem->limit; +} + +void mz_stream_mem_set_buffer_limit(void *stream, int32_t limit) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + mem->limit = limit; +} + +void mz_stream_mem_set_grow_size(void *stream, int32_t grow_size) { + mz_stream_mem *mem = (mz_stream_mem *)stream; + mem->grow_size = grow_size; +} + +void *mz_stream_mem_create(void **stream) { + mz_stream_mem *mem = NULL; + + mem = (mz_stream_mem *)MZ_ALLOC(sizeof(mz_stream_mem)); + if (mem != NULL) { + memset(mem, 0, sizeof(mz_stream_mem)); + mem->stream.vtbl = &mz_stream_mem_vtbl; + mem->grow_size = 4096; + } + if (stream != NULL) + *stream = mem; + + return mem; +} + +void mz_stream_mem_delete(void **stream) { + mz_stream_mem *mem = NULL; + if (stream == NULL) + return; + mem = (mz_stream_mem *)*stream; + if (mem != NULL) { + if ((mem->mode & MZ_OPEN_MODE_CREATE) && (mem->buffer != NULL)) + MZ_FREE(mem->buffer); + MZ_FREE(mem); + } + *stream = NULL; +} + +void *mz_stream_mem_get_interface(void) { + return (void *)&mz_stream_mem_vtbl; +} diff --git a/Externals/minizip/mz_strm_mem.h b/Externals/minizip/mz_strm_mem.h new file mode 100644 index 000000000000..5bfa13d8a652 --- /dev/null +++ b/Externals/minizip/mz_strm_mem.h @@ -0,0 +1,48 @@ +/* mz_strm_mem.h -- Stream for memory access + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_STREAM_MEM_H +#define MZ_STREAM_MEM_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_mem_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_mem_is_open(void *stream); +int32_t mz_stream_mem_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_mem_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_mem_tell(void *stream); +int32_t mz_stream_mem_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_mem_close(void *stream); +int32_t mz_stream_mem_error(void *stream); + +void mz_stream_mem_set_buffer(void *stream, void *buf, int32_t size); +int32_t mz_stream_mem_get_buffer(void *stream, const void **buf); +int32_t mz_stream_mem_get_buffer_at(void *stream, int64_t position, const void **buf); +int32_t mz_stream_mem_get_buffer_at_current(void *stream, const void **buf); +void mz_stream_mem_get_buffer_length(void *stream, int32_t *length); +void mz_stream_mem_set_buffer_limit(void *stream, int32_t limit); +void mz_stream_mem_set_grow_size(void *stream, int32_t grow_size); + +void* mz_stream_mem_create(void **stream); +void mz_stream_mem_delete(void **stream); + +void* mz_stream_mem_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_os.h b/Externals/minizip/mz_strm_os.h new file mode 100644 index 000000000000..614e2552074f --- /dev/null +++ b/Externals/minizip/mz_strm_os.h @@ -0,0 +1,40 @@ +/* mz_sstrm_os.h -- Stream for filesystem access + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_STREAM_OS_H +#define MZ_STREAM_OS_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_os_open(void *stream, const char *path, int32_t mode); +int32_t mz_stream_os_is_open(void *stream); +int32_t mz_stream_os_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_os_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_os_tell(void *stream); +int32_t mz_stream_os_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_os_close(void *stream); +int32_t mz_stream_os_error(void *stream); + +void* mz_stream_os_create(void **stream); +void mz_stream_os_delete(void **stream); + +void* mz_stream_os_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_os_posix.c b/Externals/minizip/mz_strm_os_posix.c new file mode 100644 index 000000000000..f0b5bd335179 --- /dev/null +++ b/Externals/minizip/mz_strm_os_posix.c @@ -0,0 +1,206 @@ +/* mz_strm_posix.c -- Stream for filesystem access for posix/linux + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Modifications for Zip64 support + Copyright (C) 2009-2010 Mathias Svensson + http://result42.com + Copyright (C) 1998-2010 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as zlib. 
+   See the accompanying LICENSE file for the full text of the license.
+*/
+
+
+#include "mz.h"
+#include "mz_strm.h"
+#include "mz_strm_os.h"
+
+#include <stdio.h> /* fopen, fread.. */
+#include <errno.h>
+
+/***************************************************************************/
+
+#define fopen64 fopen
+#ifndef MZ_FILE32_API
+#  ifndef NO_FSEEKO
+#    define ftello64 ftello
+#    define fseeko64 fseeko
+#  elif defined(_MSC_VER) && (_MSC_VER >= 1400)
+#    define ftello64 _ftelli64
+#    define fseeko64 _fseeki64
+#  endif
+#endif
+#ifndef ftello64
+#  define ftello64 ftell
+#endif
+#ifndef fseeko64
+#  define fseeko64 fseek
+#endif
+
+/***************************************************************************/
+
+static mz_stream_vtbl mz_stream_os_vtbl = {
+    mz_stream_os_open,
+    mz_stream_os_is_open,
+    mz_stream_os_read,
+    mz_stream_os_write,
+    mz_stream_os_tell,
+    mz_stream_os_seek,
+    mz_stream_os_close,
+    mz_stream_os_error,
+    mz_stream_os_create,
+    mz_stream_os_delete,
+    NULL,
+    NULL
+};
+
+/***************************************************************************/
+
+typedef struct mz_stream_posix_s {
+    mz_stream stream;
+    int32_t error;
+    FILE *handle;
+} mz_stream_posix;
+
+/***************************************************************************/
+
+int32_t mz_stream_os_open(void *stream, const char *path, int32_t mode) {
+    mz_stream_posix *posix = (mz_stream_posix *)stream;
+    const char *mode_fopen = NULL;
+
+    if (path == NULL)
+        return MZ_PARAM_ERROR;
+
+    if ((mode & MZ_OPEN_MODE_READWRITE) == MZ_OPEN_MODE_READ)
+        mode_fopen = "rb";
+    else if (mode & MZ_OPEN_MODE_APPEND)
+        mode_fopen = "r+b";
+    else if (mode & MZ_OPEN_MODE_CREATE)
+        mode_fopen = "wb";
+    else
+        return MZ_OPEN_ERROR;
+
+    posix->handle = fopen64(path, mode_fopen);
+    if (posix->handle == NULL) {
+        posix->error = errno;
+        return MZ_OPEN_ERROR;
+    }
+
+    if (mode & MZ_OPEN_MODE_APPEND)
+        return mz_stream_os_seek(stream, 0, MZ_SEEK_END);
+
+    return MZ_OK;
+}
+
+int32_t mz_stream_os_is_open(void *stream) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    if (posix->handle == NULL)
+        return MZ_OPEN_ERROR;
+    return MZ_OK;
+}
+
+int32_t mz_stream_os_read(void *stream, void *buf, int32_t size) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    int32_t read = (int32_t)fread(buf, 1, (size_t)size, posix->handle);
+    if (read < size && ferror(posix->handle)) {
+        posix->error = errno;
+        return MZ_READ_ERROR;
+    }
+    return read;
+}
+
+int32_t mz_stream_os_write(void *stream, const void *buf, int32_t size) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    int32_t written = (int32_t)fwrite(buf, 1, (size_t)size, posix->handle);
+    if (written < size && ferror(posix->handle)) {
+        posix->error = errno;
+        return MZ_WRITE_ERROR;
+    }
+    return written;
+}
+
+int64_t mz_stream_os_tell(void *stream) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    int64_t position = ftello64(posix->handle);
+    if (position == -1) {
+        posix->error = errno;
+        return MZ_TELL_ERROR;
+    }
+    return position;
+}
+
+int32_t mz_stream_os_seek(void *stream, int64_t offset, int32_t origin) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    int32_t fseek_origin = 0;
+
+    switch (origin) {
+    case MZ_SEEK_CUR:
+        fseek_origin = SEEK_CUR;
+        break;
+    case MZ_SEEK_END:
+        fseek_origin = SEEK_END;
+        break;
+    case MZ_SEEK_SET:
+        fseek_origin = SEEK_SET;
+        break;
+    default:
+        return MZ_SEEK_ERROR;
+    }
+
+    if (fseeko64(posix->handle, offset, fseek_origin) != 0) {
+        posix->error = errno;
+        return MZ_SEEK_ERROR;
+    }
+
+    return MZ_OK;
+}
+
+int32_t mz_stream_os_close(void *stream) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    int32_t closed = 0;
+    if (posix->handle != NULL) {
+        closed = fclose(posix->handle);
+        posix->handle = NULL;
+    }
+    if (closed != 0) {
+        posix->error = errno;
+        return MZ_CLOSE_ERROR;
+    }
+    return MZ_OK;
+}
+
+int32_t mz_stream_os_error(void *stream) {
+    mz_stream_posix *posix = (mz_stream_posix*)stream;
+    return posix->error;
+}
+
+void *mz_stream_os_create(void **stream) {
+    mz_stream_posix *posix = NULL;
+
+    posix = (mz_stream_posix *)MZ_ALLOC(sizeof(mz_stream_posix));
+    if (posix != NULL) {
+        memset(posix, 0, sizeof(mz_stream_posix));
+        posix->stream.vtbl = &mz_stream_os_vtbl;
+    }
+    if (stream != NULL)
+        *stream = posix;
+
+    return posix;
+}
+
+void mz_stream_os_delete(void **stream) {
+    mz_stream_posix *posix = NULL;
+    if (stream == NULL)
+        return;
+    posix = (mz_stream_posix *)*stream;
+    if (posix != NULL)
+        MZ_FREE(posix);
+    *stream = NULL;
+}
+
+void *mz_stream_os_get_interface(void) {
+    return (void *)&mz_stream_os_vtbl;
+}
diff --git a/Externals/minizip/mz_strm_os_win32.c b/Externals/minizip/mz_strm_os_win32.c
new file mode 100644
index 000000000000..893df541837a
--- /dev/null
+++ b/Externals/minizip/mz_strm_os_win32.c
@@ -0,0 +1,296 @@
+/* mz_strm_win32.c -- Stream for filesystem access for windows
+   part of the minizip-ng project
+
+   Copyright (C) 2010-2021 Nathan Moinvaziri
+     https://github.com/zlib-ng/minizip-ng
+   Copyright (C) 2009-2010 Mathias Svensson
+     Modifications for Zip64 support
+     http://result42.com
+   Copyright (C) 1998-2010 Gilles Vollant
+     https://www.winimage.com/zLibDll/minizip.html
+
+   This program is distributed under the terms of the same license as zlib.
+   See the accompanying LICENSE file for the full text of the license.
+*/
+
+
+#include "mz.h"
+#include "mz_os.h"
+#include "mz_strm.h"
+#include "mz_strm_os.h"
+
+#include <windows.h>
+
+/***************************************************************************/
+
+#ifndef INVALID_HANDLE_VALUE
+#  define INVALID_HANDLE_VALUE (0xFFFFFFFF)
+#endif
+
+#ifndef INVALID_SET_FILE_POINTER
+#  define INVALID_SET_FILE_POINTER ((DWORD)-1)
+#endif
+
+#if defined(WINAPI_FAMILY_ONE_PARTITION) && !defined(MZ_WINRT_API)
+#  if WINAPI_FAMILY_ONE_PARTITION(WINAPI_FAMILY, WINAPI_PARTITION_APP)
+#    define MZ_WINRT_API 1
+#  endif
+#endif
+
+/***************************************************************************/
+
+static mz_stream_vtbl mz_stream_os_vtbl = {
+    mz_stream_os_open,
+    mz_stream_os_is_open,
+    mz_stream_os_read,
+    mz_stream_os_write,
+    mz_stream_os_tell,
+    mz_stream_os_seek,
+    mz_stream_os_close,
+    mz_stream_os_error,
+    mz_stream_os_create,
+    mz_stream_os_delete,
+    NULL,
+    NULL
+};
+
+/***************************************************************************/
+
+typedef struct mz_stream_win32_s {
+    mz_stream stream;
+    HANDLE handle;
+    int32_t error;
+} mz_stream_win32;
+
+/***************************************************************************/
+
+#if 0
+#  define mz_stream_os_print printf
+#else
+#  define mz_stream_os_print(fmt,...)
+#endif + +/***************************************************************************/ + +int32_t mz_stream_os_open(void *stream, const char *path, int32_t mode) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + uint32_t desired_access = 0; + uint32_t creation_disposition = 0; + uint32_t share_mode = FILE_SHARE_READ; + uint32_t flags_attribs = FILE_ATTRIBUTE_NORMAL; + wchar_t *path_wide = NULL; + + + if (path == NULL) + return MZ_PARAM_ERROR; + + /* Some use cases require write sharing as well */ + share_mode |= FILE_SHARE_WRITE; + + if ((mode & MZ_OPEN_MODE_READWRITE) == MZ_OPEN_MODE_READ) { + desired_access = GENERIC_READ; + creation_disposition = OPEN_EXISTING; + } else if (mode & MZ_OPEN_MODE_APPEND) { + desired_access = GENERIC_WRITE | GENERIC_READ; + creation_disposition = OPEN_EXISTING; + } else if (mode & MZ_OPEN_MODE_CREATE) { + desired_access = GENERIC_WRITE | GENERIC_READ; + creation_disposition = CREATE_ALWAYS; + } else { + return MZ_PARAM_ERROR; + } + + mz_stream_os_print("Win32 - Open - %s (mode %" PRId32 ")\n", path); + + path_wide = mz_os_unicode_string_create(path, MZ_ENCODING_UTF8); + if (path_wide == NULL) + return MZ_PARAM_ERROR; + +#ifdef MZ_WINRT_API + win32->handle = CreateFile2(path_wide, desired_access, share_mode, + creation_disposition, NULL); +#else + win32->handle = CreateFileW(path_wide, desired_access, share_mode, NULL, + creation_disposition, flags_attribs, NULL); +#endif + + mz_os_unicode_string_delete(&path_wide); + + if (mz_stream_os_is_open(stream) != MZ_OK) { + win32->error = GetLastError(); + return MZ_OPEN_ERROR; + } + + if (mode & MZ_OPEN_MODE_APPEND) + return mz_stream_os_seek(stream, 0, MZ_SEEK_END); + + return MZ_OK; +} + +int32_t mz_stream_os_is_open(void *stream) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + if (win32->handle == NULL || win32->handle == INVALID_HANDLE_VALUE) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_os_read(void *stream, void *buf, int32_t size) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + uint32_t read = 0; + + if (mz_stream_os_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (!ReadFile(win32->handle, buf, size, (DWORD *)&read, NULL)) { + win32->error = GetLastError(); + if (win32->error == ERROR_HANDLE_EOF) + win32->error = 0; + } + + mz_stream_os_print("Win32 - Read - %" PRId32 "\n", read); + + return read; +} + +int32_t mz_stream_os_write(void *stream, const void *buf, int32_t size) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + int32_t written = 0; + + if (mz_stream_os_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + if (!WriteFile(win32->handle, buf, size, (DWORD *)&written, NULL)) { + win32->error = GetLastError(); + if (win32->error == ERROR_HANDLE_EOF) + win32->error = 0; + } + + mz_stream_os_print("Win32 - Write - %" PRId32 "\n", written); + + return written; +} + +static int32_t mz_stream_os_seekinternal(HANDLE handle, LARGE_INTEGER large_pos, + LARGE_INTEGER *new_pos, uint32_t move_method) { +#ifdef MZ_WINRT_API + BOOL success = FALSE; + success = SetFilePointerEx(handle, large_pos, new_pos, move_method); + if ((success == FALSE) && (GetLastError() != NO_ERROR)) + return MZ_SEEK_ERROR; + + return MZ_OK; +#else + LONG high_part = 0; + uint32_t pos = 0; + + high_part = large_pos.HighPart; + pos = SetFilePointer(handle, large_pos.LowPart, &high_part, move_method); + + if ((pos == INVALID_SET_FILE_POINTER) && (GetLastError() != NO_ERROR)) + return MZ_SEEK_ERROR; + + if (new_pos != NULL) { + new_pos->LowPart = pos; + new_pos->HighPart = 
high_part; + } + + return MZ_OK; +#endif +} + +int64_t mz_stream_os_tell(void *stream) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + LARGE_INTEGER large_pos; + + if (mz_stream_os_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + large_pos.QuadPart = 0; + + if (mz_stream_os_seekinternal(win32->handle, large_pos, &large_pos, FILE_CURRENT) != MZ_OK) + win32->error = GetLastError(); + + mz_stream_os_print("Win32 - Tell - %" PRId64 "\n", large_pos.QuadPart); + + return large_pos.QuadPart; +} + +int32_t mz_stream_os_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + uint32_t move_method = 0xFFFFFFFF; + int32_t err = MZ_OK; + LARGE_INTEGER large_pos; + + + if (mz_stream_os_is_open(stream) != MZ_OK) + return MZ_OPEN_ERROR; + + switch (origin) { + case MZ_SEEK_CUR: + move_method = FILE_CURRENT; + break; + case MZ_SEEK_END: + move_method = FILE_END; + break; + case MZ_SEEK_SET: + move_method = FILE_BEGIN; + break; + default: + return MZ_SEEK_ERROR; + } + + mz_stream_os_print("Win32 - Seek - %" PRId64 " (origin %" PRId32 ")\n", offset, origin); + + large_pos.QuadPart = offset; + + err = mz_stream_os_seekinternal(win32->handle, large_pos, NULL, move_method); + if (err != MZ_OK) { + win32->error = GetLastError(); + return err; + } + + return MZ_OK; +} + +int32_t mz_stream_os_close(void *stream) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + + if (win32->handle != NULL) + CloseHandle(win32->handle); + mz_stream_os_print("Win32 - Close\n"); + win32->handle = NULL; + return MZ_OK; +} + +int32_t mz_stream_os_error(void *stream) { + mz_stream_win32 *win32 = (mz_stream_win32 *)stream; + return win32->error; +} + +void *mz_stream_os_create(void **stream) { + mz_stream_win32 *win32 = NULL; + + win32 = (mz_stream_win32 *)MZ_ALLOC(sizeof(mz_stream_win32)); + if (win32 != NULL) { + memset(win32, 0, sizeof(mz_stream_win32)); + win32->stream.vtbl = &mz_stream_os_vtbl; + } + if (stream != NULL) + *stream = win32; + + return win32; +} + +void mz_stream_os_delete(void **stream) { + mz_stream_win32 *win32 = NULL; + if (stream == NULL) + return; + win32 = (mz_stream_win32 *)*stream; + if (win32 != NULL) + MZ_FREE(win32); + *stream = NULL; +} + +void *mz_stream_os_get_interface(void) { + return (void *)&mz_stream_os_vtbl; +} diff --git a/Externals/minizip/mz_strm_pkcrypt.c b/Externals/minizip/mz_strm_pkcrypt.c new file mode 100644 index 000000000000..41c762f9c0f6 --- /dev/null +++ b/Externals/minizip/mz_strm_pkcrypt.c @@ -0,0 +1,338 @@ +/* mz_strm_pkcrypt.c -- Code for traditional PKWARE encryption + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2005 Gilles Vollant + Modifications for Info-ZIP crypting + https://www.winimage.com/zLibDll/minizip.html + Copyright (C) 2003 Terry Thorsen + + This code is a modified version of crypting code in Info-ZIP distribution + + Copyright (C) 1990-2000 Info-ZIP. All rights reserved. + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. + + This encryption code is a direct transcription of the algorithm from + Roger Schlafly, described by Phil Katz in the file appnote.txt. This + file (appnote.txt) is distributed with the PKZIP program (even in the + version without encryption capabilities). 
+*/ + + +#include "mz.h" +#include "mz_crypt.h" +#include "mz_strm.h" +#include "mz_strm_pkcrypt.h" + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_pkcrypt_vtbl = { + mz_stream_pkcrypt_open, + mz_stream_pkcrypt_is_open, + mz_stream_pkcrypt_read, + mz_stream_pkcrypt_write, + mz_stream_pkcrypt_tell, + mz_stream_pkcrypt_seek, + mz_stream_pkcrypt_close, + mz_stream_pkcrypt_error, + mz_stream_pkcrypt_create, + mz_stream_pkcrypt_delete, + mz_stream_pkcrypt_get_prop_int64, + mz_stream_pkcrypt_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_pkcrypt_s { + mz_stream stream; + int32_t error; + int16_t initialized; + uint8_t buffer[UINT16_MAX]; + int64_t total_in; + int64_t max_total_in; + int64_t total_out; + uint32_t keys[3]; /* keys defining the pseudo-random sequence */ + uint8_t verify1; + uint8_t verify2; + const char *password; +} mz_stream_pkcrypt; + +/***************************************************************************/ + +#define mz_stream_pkcrypt_decode(strm, c) \ + (mz_stream_pkcrypt_update_keys(strm, \ + c ^= mz_stream_pkcrypt_decrypt_byte(strm))) + +#define mz_stream_pkcrypt_encode(strm, c, t) \ + (t = mz_stream_pkcrypt_decrypt_byte(strm), \ + mz_stream_pkcrypt_update_keys(strm, (uint8_t)c), (uint8_t)(t^(c))) + +/***************************************************************************/ + +static uint8_t mz_stream_pkcrypt_decrypt_byte(void *stream) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + + unsigned temp; /* POTENTIAL BUG: temp*(temp^1) may overflow in an */ + /* unpredictable manner on 16-bit systems; not a problem */ + /* with any known compiler so far, though. */ + + temp = pkcrypt->keys[2] | 2; + return (uint8_t)(((temp * (temp ^ 1)) >> 8) & 0xff); +} + +static uint8_t mz_stream_pkcrypt_update_keys(void *stream, uint8_t c) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + uint8_t buf = c; + + pkcrypt->keys[0] = (uint32_t)~mz_crypt_crc32_update(~pkcrypt->keys[0], &buf, 1); + + pkcrypt->keys[1] += pkcrypt->keys[0] & 0xff; + pkcrypt->keys[1] *= 134775813L; + pkcrypt->keys[1] += 1; + + buf = (uint8_t)(pkcrypt->keys[1] >> 24); + pkcrypt->keys[2] = (uint32_t)~mz_crypt_crc32_update(~pkcrypt->keys[2], &buf, 1); + + return (uint8_t)c; +} + +static void mz_stream_pkcrypt_init_keys(void *stream, const char *password) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + + pkcrypt->keys[0] = 305419896L; + pkcrypt->keys[1] = 591751049L; + pkcrypt->keys[2] = 878082192L; + + while (*password != 0) { + mz_stream_pkcrypt_update_keys(stream, (uint8_t)*password); + password += 1; + } +} + +/***************************************************************************/ + +int32_t mz_stream_pkcrypt_open(void *stream, const char *path, int32_t mode) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + uint16_t t = 0; + int16_t i = 0; + uint8_t verify1 = 0; + uint8_t verify2 = 0; + uint8_t header[MZ_PKCRYPT_HEADER_SIZE]; + const char *password = path; + + pkcrypt->total_in = 0; + pkcrypt->total_out = 0; + pkcrypt->initialized = 0; + + if (mz_stream_is_open(pkcrypt->stream.base) != MZ_OK) + return MZ_OPEN_ERROR; + + if (password == NULL) + password = pkcrypt->password; + if (password == NULL) + return MZ_PARAM_ERROR; + + mz_stream_pkcrypt_init_keys(stream, password); + + if (mode & MZ_OPEN_MODE_WRITE) { + /* First generate RAND_HEAD_LEN - 2 random bytes. 
*/ + mz_crypt_rand(header, MZ_PKCRYPT_HEADER_SIZE - 2); + + /* Encrypt random header (last two bytes is high word of crc) */ + for (i = 0; i < MZ_PKCRYPT_HEADER_SIZE - 2; i++) + header[i] = mz_stream_pkcrypt_encode(stream, header[i], t); + + header[i++] = mz_stream_pkcrypt_encode(stream, pkcrypt->verify1, t); + header[i++] = mz_stream_pkcrypt_encode(stream, pkcrypt->verify2, t); + + if (mz_stream_write(pkcrypt->stream.base, header, sizeof(header)) != sizeof(header)) + return MZ_WRITE_ERROR; + + pkcrypt->total_out += MZ_PKCRYPT_HEADER_SIZE; + } else if (mode & MZ_OPEN_MODE_READ) { + if (mz_stream_read(pkcrypt->stream.base, header, sizeof(header)) != sizeof(header)) + return MZ_READ_ERROR; + + for (i = 0; i < MZ_PKCRYPT_HEADER_SIZE - 2; i++) + header[i] = mz_stream_pkcrypt_decode(stream, header[i]); + + verify1 = mz_stream_pkcrypt_decode(stream, header[i++]); + verify2 = mz_stream_pkcrypt_decode(stream, header[i++]); + + /* Older versions used 2 byte check, newer versions use 1 byte check. */ + MZ_UNUSED(verify1); + if ((verify2 != 0) && (verify2 != pkcrypt->verify2)) + return MZ_PASSWORD_ERROR; + + pkcrypt->total_in += MZ_PKCRYPT_HEADER_SIZE; + } + + pkcrypt->initialized = 1; + return MZ_OK; +} + +int32_t mz_stream_pkcrypt_is_open(void *stream) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + if (pkcrypt->initialized == 0) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_pkcrypt_read(void *stream, void *buf, int32_t size) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + uint8_t *buf_ptr = (uint8_t *)buf; + int32_t bytes_to_read = size; + int32_t read = 0; + int32_t i = 0; + + + if ((int64_t)bytes_to_read > (pkcrypt->max_total_in - pkcrypt->total_in)) + bytes_to_read = (int32_t)(pkcrypt->max_total_in - pkcrypt->total_in); + + read = mz_stream_read(pkcrypt->stream.base, buf, bytes_to_read); + + for (i = 0; i < read; i++) + buf_ptr[i] = mz_stream_pkcrypt_decode(stream, buf_ptr[i]); + + if (read > 0) + pkcrypt->total_in += read; + + return read; +} + +int32_t mz_stream_pkcrypt_write(void *stream, const void *buf, int32_t size) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + const uint8_t *buf_ptr = (const uint8_t *)buf; + int32_t bytes_to_write = sizeof(pkcrypt->buffer); + int32_t total_written = 0; + int32_t written = 0; + int32_t i = 0; + uint16_t t = 0; + + if (size < 0) + return MZ_PARAM_ERROR; + + do { + if (bytes_to_write > (size - total_written)) + bytes_to_write = (size - total_written); + + for (i = 0; i < bytes_to_write; i += 1) { + pkcrypt->buffer[i] = mz_stream_pkcrypt_encode(stream, *buf_ptr, t); + buf_ptr += 1; + } + + written = mz_stream_write(pkcrypt->stream.base, pkcrypt->buffer, bytes_to_write); + if (written < 0) + return written; + + total_written += written; + } while (total_written < size && written > 0); + + pkcrypt->total_out += total_written; + return total_written; +} + +int64_t mz_stream_pkcrypt_tell(void *stream) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + return mz_stream_tell(pkcrypt->stream.base); +} + +int32_t mz_stream_pkcrypt_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + return mz_stream_seek(pkcrypt->stream.base, offset, origin); +} + +int32_t mz_stream_pkcrypt_close(void *stream) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + pkcrypt->initialized = 0; + return MZ_OK; +} + +int32_t mz_stream_pkcrypt_error(void *stream) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + return 
pkcrypt->error; +} + +void mz_stream_pkcrypt_set_password(void *stream, const char *password) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + pkcrypt->password = password; +} + +void mz_stream_pkcrypt_set_verify(void *stream, uint8_t verify1, uint8_t verify2) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + pkcrypt->verify1 = verify1; + pkcrypt->verify2 = verify2; +} + +void mz_stream_pkcrypt_get_verify(void *stream, uint8_t *verify1, uint8_t *verify2) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + *verify1 = pkcrypt->verify1; + *verify2 = pkcrypt->verify2; +} + +int32_t mz_stream_pkcrypt_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = pkcrypt->total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = pkcrypt->total_out; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = pkcrypt->max_total_in; + break; + case MZ_STREAM_PROP_HEADER_SIZE: + *value = MZ_PKCRYPT_HEADER_SIZE; + break; + case MZ_STREAM_PROP_FOOTER_SIZE: + *value = 0; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_pkcrypt_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_pkcrypt *pkcrypt = (mz_stream_pkcrypt *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN_MAX: + pkcrypt->max_total_in = value; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +void *mz_stream_pkcrypt_create(void **stream) { + mz_stream_pkcrypt *pkcrypt = NULL; + + pkcrypt = (mz_stream_pkcrypt *)MZ_ALLOC(sizeof(mz_stream_pkcrypt)); + if (pkcrypt != NULL) { + memset(pkcrypt, 0, sizeof(mz_stream_pkcrypt)); + pkcrypt->stream.vtbl = &mz_stream_pkcrypt_vtbl; + } + + if (stream != NULL) + *stream = pkcrypt; + return pkcrypt; +} + +void mz_stream_pkcrypt_delete(void **stream) { + mz_stream_pkcrypt *pkcrypt = NULL; + if (stream == NULL) + return; + pkcrypt = (mz_stream_pkcrypt *)*stream; + if (pkcrypt != NULL) + MZ_FREE(pkcrypt); + *stream = NULL; +} + +void *mz_stream_pkcrypt_get_interface(void) { + return (void *)&mz_stream_pkcrypt_vtbl; +} diff --git a/Externals/minizip/mz_strm_pkcrypt.h b/Externals/minizip/mz_strm_pkcrypt.h new file mode 100644 index 000000000000..453f1f994851 --- /dev/null +++ b/Externals/minizip/mz_strm_pkcrypt.h @@ -0,0 +1,46 @@ +/* mz_strm_pkcrypt.h -- Code for traditional PKWARE encryption + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/
+
+#ifndef MZ_STREAM_PKCRYPT_H
+#define MZ_STREAM_PKCRYPT_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/***************************************************************************/
+
+int32_t mz_stream_pkcrypt_open(void *stream, const char *filename, int32_t mode);
+int32_t mz_stream_pkcrypt_is_open(void *stream);
+int32_t mz_stream_pkcrypt_read(void *stream, void *buf, int32_t size);
+int32_t mz_stream_pkcrypt_write(void *stream, const void *buf, int32_t size);
+int64_t mz_stream_pkcrypt_tell(void *stream);
+int32_t mz_stream_pkcrypt_seek(void *stream, int64_t offset, int32_t origin);
+int32_t mz_stream_pkcrypt_close(void *stream);
+int32_t mz_stream_pkcrypt_error(void *stream);
+
+void mz_stream_pkcrypt_set_password(void *stream, const char *password);
+void mz_stream_pkcrypt_set_verify(void *stream, uint8_t verify1, uint8_t verify2);
+void mz_stream_pkcrypt_get_verify(void *stream, uint8_t *verify1, uint8_t *verify2);
+int32_t mz_stream_pkcrypt_get_prop_int64(void *stream, int32_t prop, int64_t *value);
+int32_t mz_stream_pkcrypt_set_prop_int64(void *stream, int32_t prop, int64_t value);
+
+void* mz_stream_pkcrypt_create(void **stream);
+void mz_stream_pkcrypt_delete(void **stream);
+
+void* mz_stream_pkcrypt_get_interface(void);
+
+/***************************************************************************/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/Externals/minizip/mz_strm_split.c b/Externals/minizip/mz_strm_split.c
new file mode 100644
index 000000000000..12c8bda30396
--- /dev/null
+++ b/Externals/minizip/mz_strm_split.c
@@ -0,0 +1,438 @@
+/* mz_strm_split.c -- Stream for split files
+   part of the minizip-ng project
+
+   Copyright (C) 2010-2021 Nathan Moinvaziri
+     https://github.com/zlib-ng/minizip-ng
+
+   This program is distributed under the terms of the same license as zlib.
+   See the accompanying LICENSE file for the full text of the license.
+*/
+
+
+#include "mz.h"
+#include "mz_os.h"
+#include "mz_strm.h"
+#include "mz_strm_split.h"
+
+#include <stdio.h> /* snprintf */
+
+#if defined(_MSC_VER) && (_MSC_VER < 1900)
+#  define snprintf _snprintf
+#endif
+
+/***************************************************************************/
+
+#define MZ_ZIP_MAGIC_DISKHEADER (0x08074b50)
+
+/***************************************************************************/
+
+static mz_stream_vtbl mz_stream_split_vtbl = {
+    mz_stream_split_open,
+    mz_stream_split_is_open,
+    mz_stream_split_read,
+    mz_stream_split_write,
+    mz_stream_split_tell,
+    mz_stream_split_seek,
+    mz_stream_split_close,
+    mz_stream_split_error,
+    mz_stream_split_create,
+    mz_stream_split_delete,
+    mz_stream_split_get_prop_int64,
+    mz_stream_split_set_prop_int64
+};
+
+/***************************************************************************/
+
+typedef struct mz_stream_split_s {
+    mz_stream stream;
+    int32_t is_open;
+    int64_t disk_size;
+    int64_t total_in;
+    int64_t total_in_disk;
+    int64_t total_out;
+    int64_t total_out_disk;
+    int32_t mode;
+    char *path_cd;
+    uint32_t path_cd_size;
+    char *path_disk;
+    uint32_t path_disk_size;
+    int32_t number_disk;
+    int32_t current_disk;
+    int64_t current_disk_size;
+    int32_t reached_end;
+} mz_stream_split;
+
+/***************************************************************************/
+
+#if 0
+#  define mz_stream_split_print printf
+#else
+#  define mz_stream_split_print(fmt,...)
+#endif + +/***************************************************************************/ + +static int32_t mz_stream_split_open_disk(void *stream, int32_t number_disk) { + mz_stream_split *split = (mz_stream_split *)stream; + uint32_t magic = 0; + int64_t position = 0; + int32_t i = 0; + int32_t err = MZ_OK; + int16_t disk_part = 0; + + + /* Check if we are reading or writing a disk part or the cd disk */ + if (number_disk >= 0) { + if ((split->mode & MZ_OPEN_MODE_WRITE) == 0) + disk_part = MZ_OPEN_MODE_READ; + else if (split->disk_size > 0) + disk_part = MZ_OPEN_MODE_WRITE; + } + + /* Construct disk path */ + if (disk_part > 0) { + for (i = (int32_t)strlen(split->path_disk) - 1; i >= 0; i -= 1) { + if (split->path_disk[i] != '.') + continue; + snprintf(&split->path_disk[i], split->path_disk_size - (uint32_t)i, + ".z%02" PRId32, number_disk + 1); + break; + } + } else { + strncpy(split->path_disk, split->path_cd, split->path_disk_size - 1); + split->path_disk[split->path_disk_size - 1] = 0; + } + + mz_stream_split_print("Split - Goto disk - %s (disk %" PRId32 ")\n", split->path_disk, number_disk); + + /* If disk part doesn't exist during reading then return MZ_EXIST_ERROR */ + if (disk_part == MZ_OPEN_MODE_READ) + err = mz_os_file_exists(split->path_disk); + + if (err == MZ_OK) + err = mz_stream_open(split->stream.base, split->path_disk, split->mode); + + if (err == MZ_OK) { + split->total_in_disk = 0; + split->total_out_disk = 0; + split->current_disk = number_disk; + + if (split->mode & MZ_OPEN_MODE_WRITE) { + if ((split->current_disk == 0) && (split->disk_size > 0)) { + err = mz_stream_write_uint32(split->stream.base, MZ_ZIP_MAGIC_DISKHEADER); + + split->total_out_disk += 4; + split->total_out += split->total_out_disk; + } + } else if (split->mode & MZ_OPEN_MODE_READ) { + if (split->current_disk == 0) { + err = mz_stream_read_uint32(split->stream.base, &magic); + if (magic != MZ_ZIP_MAGIC_DISKHEADER) + err = MZ_FORMAT_ERROR; + } + } + } + + if (err == MZ_OK) { + /* Get the size of the current disk we are on */ + position = mz_stream_tell(split->stream.base); + mz_stream_seek(split->stream.base, 0, MZ_SEEK_END); + split->current_disk_size = mz_stream_tell(split->stream.base); + mz_stream_seek(split->stream.base, position, MZ_SEEK_SET); + + split->is_open = 1; + } + + return err; +} + +static int32_t mz_stream_split_close_disk(void *stream) { + mz_stream_split *split = (mz_stream_split *)stream; + + if (mz_stream_is_open(split->stream.base) != MZ_OK) + return MZ_OK; + + mz_stream_split_print("Split - Close disk\n"); + return mz_stream_close(split->stream.base); +} + +static int32_t mz_stream_split_goto_disk(void *stream, int32_t number_disk) { + mz_stream_split *split = (mz_stream_split *)stream; + int32_t err = MZ_OK; + int32_t err_is_open = MZ_OK; + + err_is_open = mz_stream_is_open(split->stream.base); + + if ((split->disk_size == 0) && (split->mode & MZ_OPEN_MODE_WRITE)) { + if (err_is_open != MZ_OK) + err = mz_stream_split_open_disk(stream, number_disk); + } else if ((number_disk != split->current_disk) || (err_is_open != MZ_OK)) { + err = mz_stream_split_close_disk(stream); + if (err == MZ_OK) { + err = mz_stream_split_open_disk(stream, number_disk); + if (err == MZ_OK) + split->number_disk = number_disk; + } + } + + return err; +} + +int32_t mz_stream_split_open(void *stream, const char *path, int32_t mode) { + mz_stream_split *split = (mz_stream_split *)stream; + int32_t number_disk = 0; + + split->mode = mode; + + split->path_cd_size = (uint32_t)strlen(path) + 1; + split->path_cd 
= (char *)MZ_ALLOC(split->path_cd_size); + + if (split->path_cd == NULL) + return MZ_MEM_ERROR; + + strncpy(split->path_cd, path, split->path_cd_size - 1); + split->path_cd[split->path_cd_size - 1] = 0; + + mz_stream_split_print("Split - Open - %s (disk %" PRId32 ")\n", split->path_cd, number_disk); + + split->path_disk_size = (uint32_t)strlen(path) + 10; + split->path_disk = (char *)MZ_ALLOC(split->path_disk_size); + + if (split->path_disk == NULL) { + MZ_FREE(split->path_cd); + return MZ_MEM_ERROR; + } + + strncpy(split->path_disk, path, split->path_disk_size - 1); + split->path_disk[split->path_disk_size - 1] = 0; + + if ((mode & MZ_OPEN_MODE_WRITE) && ((mode & MZ_OPEN_MODE_APPEND) == 0)) { + number_disk = 0; + split->current_disk = -1; + } else { + number_disk = -1; + split->current_disk = 0; + } + + return mz_stream_split_goto_disk(stream, number_disk); +} + +int32_t mz_stream_split_is_open(void *stream) { + mz_stream_split *split = (mz_stream_split *)stream; + if (split->is_open != 1) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_split_read(void *stream, void *buf, int32_t size) { + mz_stream_split *split = (mz_stream_split *)stream; + int32_t bytes_left = size; + int32_t read = 0; + int32_t err = MZ_OK; + uint8_t *buf_ptr = (uint8_t *)buf; + + err = mz_stream_split_goto_disk(stream, split->number_disk); + if (err != MZ_OK) + return err; + + while (bytes_left > 0) { + read = mz_stream_read(split->stream.base, buf_ptr, bytes_left); + + mz_stream_split_print("Split - Read disk - %" PRId32 "\n", read); + + if (read < 0) + return read; + if (read == 0) { + if (split->current_disk < 0) /* No more disks to goto */ + break; + err = mz_stream_split_goto_disk(stream, split->current_disk + 1); + if (err == MZ_EXIST_ERROR) { + split->current_disk = -1; + break; + } + if (err != MZ_OK) + return err; + } + + bytes_left -= read; + buf_ptr += read; + split->total_in += read; + split->total_in_disk += read; + } + return size - bytes_left; +} + +int32_t mz_stream_split_write(void *stream, const void *buf, int32_t size) { + mz_stream_split *split = (mz_stream_split *)stream; + int64_t position = 0; + int32_t written = 0; + int32_t bytes_left = size; + int32_t bytes_to_write = 0; + int32_t bytes_avail = 0; + int32_t number_disk = -1; + int32_t err = MZ_OK; + const uint8_t *buf_ptr = (const uint8_t *)buf; + + position = mz_stream_tell(split->stream.base); + + while (bytes_left > 0) { + bytes_to_write = bytes_left; + + if (split->disk_size > 0) { + if ((split->total_out_disk == split->disk_size && split->total_out > 0) || + (split->number_disk == -1 && split->number_disk != split->current_disk)) { + if (split->number_disk != -1) + number_disk = split->current_disk + 1; + + err = mz_stream_split_goto_disk(stream, number_disk); + if (err != MZ_OK) + return err; + } + + if (split->number_disk != -1) { + bytes_avail = (int32_t)(split->disk_size - split->total_out_disk); + if (bytes_to_write > bytes_avail) + bytes_to_write = bytes_avail; + } + } + + written = mz_stream_write(split->stream.base, buf_ptr, bytes_to_write); + if (written != bytes_to_write) + return MZ_WRITE_ERROR; + + mz_stream_split_print("Split - Write disk - %" PRId32 "\n", written); + + bytes_left -= written; + buf_ptr += written; + + split->total_out += written; + split->total_out_disk += written; + + if (position == split->current_disk_size) { + split->current_disk_size += written; + position = split->current_disk_size; + } + } + + return size - bytes_left; +} + +int64_t mz_stream_split_tell(void *stream) { + 
mz_stream_split *split = (mz_stream_split *)stream; + int32_t err = MZ_OK; + err = mz_stream_split_goto_disk(stream, split->number_disk); + if (err != MZ_OK) + return err; + return mz_stream_tell(split->stream.base); +} + +int32_t mz_stream_split_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_split *split = (mz_stream_split *)stream; + int64_t disk_left = 0; + int64_t position = 0; + int32_t err = MZ_OK; + + err = mz_stream_split_goto_disk(stream, split->number_disk); + + if (err != MZ_OK) + return err; + + mz_stream_split_print("Split - Seek disk - %" PRId64 " (origin %" PRId32 ")\n", offset, origin); + + if ((origin == MZ_SEEK_CUR) && (split->number_disk != -1)) { + position = mz_stream_tell(split->stream.base); + disk_left = split->current_disk_size - position; + + while (offset > disk_left) { + err = mz_stream_split_goto_disk(stream, split->current_disk + 1); + if (err != MZ_OK) + return err; + + offset -= disk_left; + disk_left = split->current_disk_size; + } + } + + return mz_stream_seek(split->stream.base, offset, origin); +} + +int32_t mz_stream_split_close(void *stream) { + mz_stream_split *split = (mz_stream_split *)stream; + int32_t err = MZ_OK; + + err = mz_stream_split_close_disk(stream); + split->is_open = 0; + return err; +} + +int32_t mz_stream_split_error(void *stream) { + mz_stream_split *split = (mz_stream_split *)stream; + return mz_stream_error(split->stream.base); +} + +int32_t mz_stream_split_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_split *split = (mz_stream_split *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_OUT: + *value = split->total_out; + break; + case MZ_STREAM_PROP_DISK_NUMBER: + *value = split->number_disk; + break; + case MZ_STREAM_PROP_DISK_SIZE: + *value = split->disk_size; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_split_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_split *split = (mz_stream_split *)stream; + switch (prop) { + case MZ_STREAM_PROP_DISK_NUMBER: + split->number_disk = (int32_t)value; + break; + case MZ_STREAM_PROP_DISK_SIZE: + split->disk_size = value; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +void *mz_stream_split_create(void **stream) { + mz_stream_split *split = NULL; + + split = (mz_stream_split *)MZ_ALLOC(sizeof(mz_stream_split)); + if (split != NULL) { + memset(split, 0, sizeof(mz_stream_split)); + split->stream.vtbl = &mz_stream_split_vtbl; + } + if (stream != NULL) + *stream = split; + + return split; +} + +void mz_stream_split_delete(void **stream) { + mz_stream_split *split = NULL; + if (stream == NULL) + return; + split = (mz_stream_split *)*stream; + if (split != NULL) { + if (split->path_cd) + MZ_FREE(split->path_cd); + if (split->path_disk) + MZ_FREE(split->path_disk); + + MZ_FREE(split); + } + *stream = NULL; +} + +void *mz_stream_split_get_interface(void) { + return (void *)&mz_stream_split_vtbl; +} diff --git a/Externals/minizip/mz_strm_split.h b/Externals/minizip/mz_strm_split.h new file mode 100644 index 000000000000..da404dae2aaa --- /dev/null +++ b/Externals/minizip/mz_strm_split.h @@ -0,0 +1,43 @@ +/* mz_strm_split.h -- Stream for split files + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_STREAM_SPLIT_H +#define MZ_STREAM_SPLIT_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_split_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_split_is_open(void *stream); +int32_t mz_stream_split_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_split_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_split_tell(void *stream); +int32_t mz_stream_split_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_split_close(void *stream); +int32_t mz_stream_split_error(void *stream); + +int32_t mz_stream_split_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_split_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_split_create(void **stream); +void mz_stream_split_delete(void **stream); + +void* mz_stream_split_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_wzaes.c b/Externals/minizip/mz_strm_wzaes.c new file mode 100644 index 000000000000..fd0119991a9b --- /dev/null +++ b/Externals/minizip/mz_strm_wzaes.c @@ -0,0 +1,362 @@ +/* mz_strm_wzaes.c -- Stream for WinZip AES encryption + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 1998-2010 Brian Gladman, Worcester, UK + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" +#include "mz_crypt.h" +#include "mz_strm.h" +#include "mz_strm_wzaes.h" + +/***************************************************************************/ + +#define MZ_AES_KEYING_ITERATIONS (1000) +#define MZ_AES_SALT_LENGTH(MODE) (4 * (MODE & 3) + 4) +#define MZ_AES_SALT_LENGTH_MAX (16) +#define MZ_AES_PW_LENGTH_MAX (128) +#define MZ_AES_PW_VERIFY_SIZE (2) +#define MZ_AES_AUTHCODE_SIZE (10) + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_wzaes_vtbl = { + mz_stream_wzaes_open, + mz_stream_wzaes_is_open, + mz_stream_wzaes_read, + mz_stream_wzaes_write, + mz_stream_wzaes_tell, + mz_stream_wzaes_seek, + mz_stream_wzaes_close, + mz_stream_wzaes_error, + mz_stream_wzaes_create, + mz_stream_wzaes_delete, + mz_stream_wzaes_get_prop_int64, + mz_stream_wzaes_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_wzaes_s { + mz_stream stream; + int32_t mode; + int32_t error; + int16_t initialized; + uint8_t buffer[UINT16_MAX]; + int64_t total_in; + int64_t max_total_in; + int64_t total_out; + int16_t encryption_mode; + const char *password; + void *aes; + uint32_t crypt_pos; + uint8_t crypt_block[MZ_AES_BLOCK_SIZE]; + void *hmac; + uint8_t nonce[MZ_AES_BLOCK_SIZE]; +} mz_stream_wzaes; + +/***************************************************************************/ + +int32_t mz_stream_wzaes_open(void *stream, const char *path, int32_t mode) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + uint16_t salt_length = 0; + uint16_t password_length = 0; + uint16_t key_length = 0; + uint8_t kbuf[2 * MZ_AES_KEY_LENGTH_MAX + MZ_AES_PW_VERIFY_SIZE]; + uint8_t verify[MZ_AES_PW_VERIFY_SIZE]; + uint8_t verify_expected[MZ_AES_PW_VERIFY_SIZE]; + uint8_t 
salt_value[MZ_AES_SALT_LENGTH_MAX]; + const char *password = path; + + wzaes->total_in = 0; + wzaes->total_out = 0; + wzaes->initialized = 0; + + if (mz_stream_is_open(wzaes->stream.base) != MZ_OK) + return MZ_OPEN_ERROR; + + if (password == NULL) + password = wzaes->password; + if (password == NULL) + return MZ_PARAM_ERROR; + password_length = (uint16_t)strlen(password); + if (password_length > MZ_AES_PW_LENGTH_MAX) + return MZ_PARAM_ERROR; + + if (wzaes->encryption_mode < 1 || wzaes->encryption_mode > 3) + return MZ_PARAM_ERROR; + + salt_length = MZ_AES_SALT_LENGTH(wzaes->encryption_mode); + + if (mode & MZ_OPEN_MODE_WRITE) { + mz_crypt_rand(salt_value, salt_length); + } else if (mode & MZ_OPEN_MODE_READ) { + if (mz_stream_read(wzaes->stream.base, salt_value, salt_length) != salt_length) + return MZ_READ_ERROR; + } + + key_length = MZ_AES_KEY_LENGTH(wzaes->encryption_mode); + + /* Derive the encryption and authentication keys and the password verifier */ + mz_crypt_pbkdf2((uint8_t *)password, password_length, salt_value, salt_length, + MZ_AES_KEYING_ITERATIONS, kbuf, 2 * key_length + MZ_AES_PW_VERIFY_SIZE); + + /* Initialize the encryption nonce and buffer pos */ + wzaes->crypt_pos = MZ_AES_BLOCK_SIZE; + memset(wzaes->nonce, 0, sizeof(wzaes->nonce)); + + /* Initialize for encryption using key 1 */ + mz_crypt_aes_reset(wzaes->aes); + mz_crypt_aes_set_mode(wzaes->aes, wzaes->encryption_mode); + mz_crypt_aes_set_encrypt_key(wzaes->aes, kbuf, key_length); + + /* Initialize for authentication using key 2 */ + mz_crypt_hmac_reset(wzaes->hmac); + mz_crypt_hmac_set_algorithm(wzaes->hmac, MZ_HASH_SHA1); + mz_crypt_hmac_init(wzaes->hmac, kbuf + key_length, key_length); + + memcpy(verify, kbuf + (2 * key_length), MZ_AES_PW_VERIFY_SIZE); + + if (mode & MZ_OPEN_MODE_WRITE) { + if (mz_stream_write(wzaes->stream.base, salt_value, salt_length) != salt_length) + return MZ_WRITE_ERROR; + + wzaes->total_out += salt_length; + + if (mz_stream_write(wzaes->stream.base, verify, MZ_AES_PW_VERIFY_SIZE) != MZ_AES_PW_VERIFY_SIZE) + return MZ_WRITE_ERROR; + + wzaes->total_out += MZ_AES_PW_VERIFY_SIZE; + } else if (mode & MZ_OPEN_MODE_READ) { + wzaes->total_in += salt_length; + + if (mz_stream_read(wzaes->stream.base, verify_expected, MZ_AES_PW_VERIFY_SIZE) != MZ_AES_PW_VERIFY_SIZE) + return MZ_READ_ERROR; + + wzaes->total_in += MZ_AES_PW_VERIFY_SIZE; + + if (memcmp(verify_expected, verify, MZ_AES_PW_VERIFY_SIZE) != 0) + return MZ_PASSWORD_ERROR; + } + + wzaes->mode = mode; + wzaes->initialized = 1; + + return MZ_OK; +} + +int32_t mz_stream_wzaes_is_open(void *stream) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + if (wzaes->initialized == 0) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +static int32_t mz_stream_wzaes_ctr_encrypt(void *stream, uint8_t *buf, int32_t size) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + uint32_t pos = wzaes->crypt_pos; + uint32_t i = 0; + int32_t err = MZ_OK; + + while (i < (uint32_t)size) { + if (pos == MZ_AES_BLOCK_SIZE) { + uint32_t j = 0; + + /* Increment encryption nonce */ + while (j < 8 && !++wzaes->nonce[j]) + j += 1; + + /* Encrypt the nonce to form next xor buffer */ + memcpy(wzaes->crypt_block, wzaes->nonce, MZ_AES_BLOCK_SIZE); + mz_crypt_aes_encrypt(wzaes->aes, wzaes->crypt_block, sizeof(wzaes->crypt_block)); + pos = 0; + } + + buf[i++] ^= wzaes->crypt_block[pos++]; + } + + wzaes->crypt_pos = pos; + return err; +} + +int32_t mz_stream_wzaes_read(void *stream, void *buf, int32_t size) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + int64_t 
max_total_in = 0; + int32_t bytes_to_read = size; + int32_t read = 0; + + max_total_in = wzaes->max_total_in - MZ_AES_FOOTER_SIZE; + if ((int64_t)bytes_to_read > (max_total_in - wzaes->total_in)) + bytes_to_read = (int32_t)(max_total_in - wzaes->total_in); + + read = mz_stream_read(wzaes->stream.base, buf, bytes_to_read); + + if (read > 0) { + mz_crypt_hmac_update(wzaes->hmac, (uint8_t *)buf, read); + mz_stream_wzaes_ctr_encrypt(stream, (uint8_t *)buf, read); + + wzaes->total_in += read; + } + + return read; +} + +int32_t mz_stream_wzaes_write(void *stream, const void *buf, int32_t size) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + const uint8_t *buf_ptr = (const uint8_t *)buf; + int32_t bytes_to_write = sizeof(wzaes->buffer); + int32_t total_written = 0; + int32_t written = 0; + + if (size < 0) + return MZ_PARAM_ERROR; + + do { + if (bytes_to_write > (size - total_written)) + bytes_to_write = (size - total_written); + + memcpy(wzaes->buffer, buf_ptr, bytes_to_write); + buf_ptr += bytes_to_write; + + mz_stream_wzaes_ctr_encrypt(stream, (uint8_t *)wzaes->buffer, bytes_to_write); + mz_crypt_hmac_update(wzaes->hmac, wzaes->buffer, bytes_to_write); + + written = mz_stream_write(wzaes->stream.base, wzaes->buffer, bytes_to_write); + if (written < 0) + return written; + + total_written += written; + } while (total_written < size && written > 0); + + wzaes->total_out += total_written; + return total_written; +} + +int64_t mz_stream_wzaes_tell(void *stream) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + return mz_stream_tell(wzaes->stream.base); +} + +int32_t mz_stream_wzaes_seek(void *stream, int64_t offset, int32_t origin) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + return mz_stream_seek(wzaes->stream.base, offset, origin); +} + +int32_t mz_stream_wzaes_close(void *stream) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + uint8_t expected_hash[MZ_AES_AUTHCODE_SIZE]; + uint8_t computed_hash[MZ_HASH_SHA1_SIZE]; + + mz_crypt_hmac_end(wzaes->hmac, computed_hash, sizeof(computed_hash)); + + if (wzaes->mode & MZ_OPEN_MODE_WRITE) { + if (mz_stream_write(wzaes->stream.base, computed_hash, MZ_AES_AUTHCODE_SIZE) != MZ_AES_AUTHCODE_SIZE) + return MZ_WRITE_ERROR; + + wzaes->total_out += MZ_AES_AUTHCODE_SIZE; + } else if (wzaes->mode & MZ_OPEN_MODE_READ) { + if (mz_stream_read(wzaes->stream.base, expected_hash, MZ_AES_AUTHCODE_SIZE) != MZ_AES_AUTHCODE_SIZE) + return MZ_READ_ERROR; + + wzaes->total_in += MZ_AES_AUTHCODE_SIZE; + + /* If entire entry was not read this will fail */ + if (memcmp(computed_hash, expected_hash, MZ_AES_AUTHCODE_SIZE) != 0) + return MZ_CRC_ERROR; + } + + wzaes->initialized = 0; + return MZ_OK; +} + +int32_t mz_stream_wzaes_error(void *stream) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + return wzaes->error; +} + +void mz_stream_wzaes_set_password(void *stream, const char *password) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + wzaes->password = password; +} + +void mz_stream_wzaes_set_encryption_mode(void *stream, int16_t encryption_mode) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + wzaes->encryption_mode = encryption_mode; +} + +int32_t mz_stream_wzaes_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = wzaes->total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = wzaes->total_out; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = wzaes->max_total_in; + break; + case 
MZ_STREAM_PROP_HEADER_SIZE: + *value = MZ_AES_SALT_LENGTH((int64_t)wzaes->encryption_mode) + MZ_AES_PW_VERIFY_SIZE; + break; + case MZ_STREAM_PROP_FOOTER_SIZE: + *value = MZ_AES_AUTHCODE_SIZE; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_wzaes_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_wzaes *wzaes = (mz_stream_wzaes *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN_MAX: + wzaes->max_total_in = value; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +void *mz_stream_wzaes_create(void **stream) { + mz_stream_wzaes *wzaes = NULL; + + wzaes = (mz_stream_wzaes *)MZ_ALLOC(sizeof(mz_stream_wzaes)); + if (wzaes != NULL) { + memset(wzaes, 0, sizeof(mz_stream_wzaes)); + wzaes->stream.vtbl = &mz_stream_wzaes_vtbl; + wzaes->encryption_mode = MZ_AES_ENCRYPTION_MODE_256; + + mz_crypt_hmac_create(&wzaes->hmac); + mz_crypt_aes_create(&wzaes->aes); + } + if (stream != NULL) + *stream = wzaes; + + return wzaes; +} + +void mz_stream_wzaes_delete(void **stream) { + mz_stream_wzaes *wzaes = NULL; + if (stream == NULL) + return; + wzaes = (mz_stream_wzaes *)*stream; + if (wzaes != NULL) { + mz_crypt_aes_delete(&wzaes->aes); + mz_crypt_hmac_delete(&wzaes->hmac); + MZ_FREE(wzaes); + } + *stream = NULL; +} + +void *mz_stream_wzaes_get_interface(void) { + return (void *)&mz_stream_wzaes_vtbl; +} diff --git a/Externals/minizip/mz_strm_wzaes.h b/Externals/minizip/mz_strm_wzaes.h new file mode 100644 index 000000000000..e27f11203f0e --- /dev/null +++ b/Externals/minizip/mz_strm_wzaes.h @@ -0,0 +1,46 @@ +/* mz_strm_wzaes.h -- Stream for WinZIP AES encryption + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_STREAM_WZAES_SHA1_H +#define MZ_STREAM_WZAES_SHA1_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_wzaes_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_wzaes_is_open(void *stream); +int32_t mz_stream_wzaes_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_wzaes_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_wzaes_tell(void *stream); +int32_t mz_stream_wzaes_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_wzaes_close(void *stream); +int32_t mz_stream_wzaes_error(void *stream); + +void mz_stream_wzaes_set_password(void *stream, const char *password); +void mz_stream_wzaes_set_encryption_mode(void *stream, int16_t encryption_mode); + +int32_t mz_stream_wzaes_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_wzaes_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_wzaes_create(void **stream); +void mz_stream_wzaes_delete(void **stream); + +void* mz_stream_wzaes_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_zlib.c b/Externals/minizip/mz_strm_zlib.c new file mode 100644 index 000000000000..e83bbac91a6f --- /dev/null +++ b/Externals/minizip/mz_strm_zlib.c @@ -0,0 +1,393 @@ +/* mz_strm_zlib.c -- Stream for zlib inflate/deflate + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_zlib.h" + +#include "zlib.h" +#if defined(ZLIBNG_VERNUM) && !defined(ZLIB_COMPAT) +# include "zlib-ng.h" +#endif + +/***************************************************************************/ + +#if defined(ZLIBNG_VERNUM) && !defined(ZLIB_COMPAT) +# define ZLIB_PREFIX(x) zng_ ## x + typedef zng_stream zlib_stream; +#else +# define ZLIB_PREFIX(x) x + typedef z_stream zlib_stream; +#endif + +#if !defined(DEF_MEM_LEVEL) +# if MAX_MEM_LEVEL >= 8 +# define DEF_MEM_LEVEL 8 +# else +# define DEF_MEM_LEVEL MAX_MEM_LEVEL +# endif +#endif + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_zlib_vtbl = { + mz_stream_zlib_open, + mz_stream_zlib_is_open, + mz_stream_zlib_read, + mz_stream_zlib_write, + mz_stream_zlib_tell, + mz_stream_zlib_seek, + mz_stream_zlib_close, + mz_stream_zlib_error, + mz_stream_zlib_create, + mz_stream_zlib_delete, + mz_stream_zlib_get_prop_int64, + mz_stream_zlib_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_zlib_s { + mz_stream stream; + zlib_stream zstream; + uint8_t buffer[INT16_MAX]; + int32_t buffer_len; + int64_t total_in; + int64_t total_out; + int64_t max_total_in; + int8_t initialized; + int16_t level; + int32_t window_bits; + int32_t mode; + int32_t error; +} mz_stream_zlib; + +/***************************************************************************/ + +int32_t mz_stream_zlib_open(void *stream, const char *path, int32_t mode) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + + MZ_UNUSED(path); + + zlib->zstream.data_type = Z_BINARY; + zlib->zstream.zalloc = Z_NULL; + zlib->zstream.zfree = Z_NULL; + zlib->zstream.opaque = Z_NULL; + zlib->zstream.total_in = 0; + zlib->zstream.total_out = 0; + + zlib->total_in = 0; + zlib->total_out = 0; + + if (mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + zlib->zstream.next_out = zlib->buffer; + zlib->zstream.avail_out = sizeof(zlib->buffer); + + zlib->error = ZLIB_PREFIX(deflateInit2)(&zlib->zstream, (int8_t)zlib->level, Z_DEFLATED, + zlib->window_bits, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +#endif + } else if (mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + zlib->zstream.next_in = zlib->buffer; + zlib->zstream.avail_in = 0; + + zlib->error = ZLIB_PREFIX(inflateInit2)(&zlib->zstream, zlib->window_bits); +#endif + } + + if (zlib->error != Z_OK) + return MZ_OPEN_ERROR; + + zlib->initialized = 1; + zlib->mode = mode; + return MZ_OK; +} + +int32_t mz_stream_zlib_is_open(void *stream) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + if (zlib->initialized != 1) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_zlib_read(void *stream, void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + uint64_t total_in_before = 0; + uint64_t total_in_after = 0; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + uint32_t total_in = 0; + uint32_t total_out = 0; + uint32_t in_bytes = 0; + uint32_t out_bytes = 0; + int32_t bytes_to_read = sizeof(zlib->buffer); + int32_t read = 0; + int32_t err = Z_OK; + + + zlib->zstream.next_out = (Bytef*)buf; + zlib->zstream.avail_out = (uInt)size; + + do { + if (zlib->zstream.avail_in == 0) { + if (zlib->max_total_in > 0) { + 
if ((int64_t)bytes_to_read > (zlib->max_total_in - zlib->total_in)) + bytes_to_read = (int32_t)(zlib->max_total_in - zlib->total_in); + } + + read = mz_stream_read(zlib->stream.base, zlib->buffer, bytes_to_read); + + if (read < 0) + return read; + + zlib->zstream.next_in = zlib->buffer; + zlib->zstream.avail_in = read; + } + + total_in_before = zlib->zstream.avail_in; + total_out_before = zlib->zstream.total_out; + + err = ZLIB_PREFIX(inflate)(&zlib->zstream, Z_SYNC_FLUSH); + if ((err >= Z_OK) && (zlib->zstream.msg != NULL)) { + zlib->error = Z_DATA_ERROR; + break; + } + + total_in_after = zlib->zstream.avail_in; + total_out_after = zlib->zstream.total_out; + + in_bytes = (uint32_t)(total_in_before - total_in_after); + out_bytes = (uint32_t)(total_out_after - total_out_before); + + total_in += in_bytes; + total_out += out_bytes; + + zlib->total_in += in_bytes; + zlib->total_out += out_bytes; + + if (err == Z_STREAM_END) + break; + if (err != Z_OK) { + zlib->error = err; + break; + } + } while (zlib->zstream.avail_out > 0); + + if (zlib->error != 0) { + /* Zlib errors are compatible with MZ */ + return zlib->error; + } + + return total_out; +#endif +} + +#ifndef MZ_ZIP_NO_COMPRESSION +static int32_t mz_stream_zlib_flush(void *stream) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + if (mz_stream_write(zlib->stream.base, zlib->buffer, zlib->buffer_len) != zlib->buffer_len) + return MZ_WRITE_ERROR; + return MZ_OK; +} + +static int32_t mz_stream_zlib_deflate(void *stream, int flush) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + int32_t out_bytes = 0; + int32_t err = Z_OK; + + + do { + if (zlib->zstream.avail_out == 0) { + err = mz_stream_zlib_flush(zlib); + if (err != MZ_OK) + return err; + + zlib->zstream.avail_out = sizeof(zlib->buffer); + zlib->zstream.next_out = zlib->buffer; + + zlib->buffer_len = 0; + } + + total_out_before = zlib->zstream.total_out; + err = ZLIB_PREFIX(deflate)(&zlib->zstream, flush); + total_out_after = zlib->zstream.total_out; + + out_bytes = (uint32_t)(total_out_after - total_out_before); + + zlib->buffer_len += out_bytes; + zlib->total_out += out_bytes; + + if (err == Z_STREAM_END) + break; + if (err != Z_OK) { + zlib->error = err; + return MZ_DATA_ERROR; + } + } while ((zlib->zstream.avail_in > 0) || (flush == Z_FINISH && err == Z_OK)); + + return MZ_OK; +} +#endif + +int32_t mz_stream_zlib_write(void *stream, const void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_COMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + int32_t err = MZ_OK; + + zlib->zstream.next_in = (Bytef*)(intptr_t)buf; + zlib->zstream.avail_in = (uInt)size; + + err = mz_stream_zlib_deflate(stream, Z_NO_FLUSH); + if (err != MZ_OK) { + return err; + } + + zlib->total_in += size; + return size; +#endif +} + +int64_t mz_stream_zlib_tell(void *stream) { + MZ_UNUSED(stream); + + return MZ_TELL_ERROR; +} + +int32_t mz_stream_zlib_seek(void *stream, int64_t offset, int32_t origin) { + MZ_UNUSED(stream); + MZ_UNUSED(offset); + MZ_UNUSED(origin); + + return MZ_SEEK_ERROR; +} + +int32_t mz_stream_zlib_close(void *stream) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + + + if (zlib->mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + mz_stream_zlib_deflate(stream, Z_FINISH); + mz_stream_zlib_flush(stream); + + ZLIB_PREFIX(deflateEnd)(&zlib->zstream); +#endif + } else if (zlib->mode & 
MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + ZLIB_PREFIX(inflateEnd)(&zlib->zstream); +#endif + } + + zlib->initialized = 0; + + if (zlib->error != Z_OK) + return MZ_CLOSE_ERROR; + return MZ_OK; +} + +int32_t mz_stream_zlib_error(void *stream) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + return zlib->error; +} + +int32_t mz_stream_zlib_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = zlib->total_in; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = zlib->max_total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = zlib->total_out; + break; + case MZ_STREAM_PROP_HEADER_SIZE: + *value = 0; + break; + case MZ_STREAM_PROP_COMPRESS_WINDOW: + *value = zlib->window_bits; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_zlib_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_zlib *zlib = (mz_stream_zlib *)stream; + switch (prop) { + case MZ_STREAM_PROP_COMPRESS_LEVEL: + zlib->level = (int16_t)value; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + zlib->max_total_in = value; + break; + case MZ_STREAM_PROP_COMPRESS_WINDOW: + zlib->window_bits = (int32_t)value; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +void *mz_stream_zlib_create(void **stream) { + mz_stream_zlib *zlib = NULL; + + zlib = (mz_stream_zlib *)MZ_ALLOC(sizeof(mz_stream_zlib)); + if (zlib != NULL) { + memset(zlib, 0, sizeof(mz_stream_zlib)); + zlib->stream.vtbl = &mz_stream_zlib_vtbl; + zlib->level = Z_DEFAULT_COMPRESSION; + zlib->window_bits = -MAX_WBITS; + } + if (stream != NULL) + *stream = zlib; + + return zlib; +} + +void mz_stream_zlib_delete(void **stream) { + mz_stream_zlib *zlib = NULL; + if (stream == NULL) + return; + zlib = (mz_stream_zlib *)*stream; + if (zlib != NULL) + MZ_FREE(zlib); + *stream = NULL; +} + +void *mz_stream_zlib_get_interface(void) { + return (void *)&mz_stream_zlib_vtbl; +} diff --git a/Externals/minizip/mz_strm_zlib.h b/Externals/minizip/mz_strm_zlib.h new file mode 100644 index 000000000000..47f74804fb45 --- /dev/null +++ b/Externals/minizip/mz_strm_zlib.h @@ -0,0 +1,43 @@ +/* mz_strm_zlib.h -- Stream for zlib inflate/deflate + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_STREAM_ZLIB_H +#define MZ_STREAM_ZLIB_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_zlib_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_zlib_is_open(void *stream); +int32_t mz_stream_zlib_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_zlib_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_zlib_tell(void *stream); +int32_t mz_stream_zlib_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_zlib_close(void *stream); +int32_t mz_stream_zlib_error(void *stream); + +int32_t mz_stream_zlib_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_zlib_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_zlib_create(void **stream); +void mz_stream_zlib_delete(void **stream); + +void* mz_stream_zlib_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_strm_zstd.c b/Externals/minizip/mz_strm_zstd.c new file mode 100644 index 000000000000..4c9c33561819 --- /dev/null +++ b/Externals/minizip/mz_strm_zstd.c @@ -0,0 +1,351 @@ +/* mz_strm_zstd.c -- Stream for zstd compress/decompress + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Authors: Force Charlie + https://github.com/fcharlie + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#include "mz.h" +#include "mz_strm.h" +#include "mz_strm_zstd.h" + +#include + +/***************************************************************************/ + +static mz_stream_vtbl mz_stream_zstd_vtbl = { + mz_stream_zstd_open, + mz_stream_zstd_is_open, + mz_stream_zstd_read, + mz_stream_zstd_write, + mz_stream_zstd_tell, + mz_stream_zstd_seek, + mz_stream_zstd_close, + mz_stream_zstd_error, + mz_stream_zstd_create, + mz_stream_zstd_delete, + mz_stream_zstd_get_prop_int64, + mz_stream_zstd_set_prop_int64 +}; + +/***************************************************************************/ + +typedef struct mz_stream_zstd_s { + mz_stream stream; + ZSTD_CStream *zcstream; + ZSTD_DStream *zdstream; + ZSTD_outBuffer out; + ZSTD_inBuffer in; + int32_t mode; + int32_t error; + uint8_t buffer[INT16_MAX]; + int32_t buffer_len; + int64_t total_in; + int64_t total_out; + int64_t max_total_in; + int64_t max_total_out; + int8_t initialized; + uint32_t preset; +} mz_stream_zstd; + +/***************************************************************************/ + +int32_t mz_stream_zstd_open(void *stream, const char *path, int32_t mode) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + + MZ_UNUSED(path); + + if (mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + zstd->zcstream = ZSTD_createCStream(); + zstd->out.dst = zstd->buffer; + zstd->out.size = sizeof(zstd->buffer); + zstd->out.pos = 0; +#endif + } else if (mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + zstd->zdstream = ZSTD_createDStream(); + memset(&zstd->out, 0, sizeof(ZSTD_outBuffer)); +#endif + } + + memset(&zstd->in, 0, sizeof(ZSTD_inBuffer)); + + zstd->initialized = 1; + zstd->mode = mode; + zstd->error = MZ_OK; + + return MZ_OK; +} + +int32_t mz_stream_zstd_is_open(void *stream) { + 
mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + if (zstd->initialized != 1) + return MZ_OPEN_ERROR; + return MZ_OK; +} + +int32_t mz_stream_zstd_read(void *stream, void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + uint64_t total_in_before = 0; + uint64_t total_in_after = 0; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + int32_t total_in = 0; + int32_t total_out = 0; + int32_t in_bytes = 0; + int32_t out_bytes = 0; + int32_t bytes_to_read = sizeof(zstd->buffer); + int32_t read = 0; + size_t result = 0; + + zstd->out.dst = (void*)buf; + zstd->out.size = (size_t)size; + zstd->out.pos = 0; + + do { + if (zstd->in.pos == zstd->in.size) { + if (zstd->max_total_in > 0) { + if ((int64_t)bytes_to_read > (zstd->max_total_in - zstd->total_in)) + bytes_to_read = (int32_t)(zstd->max_total_in - zstd->total_in); + } + + read = mz_stream_read(zstd->stream.base, zstd->buffer, bytes_to_read); + + if (read < 0) + return read; + + zstd->in.src = (const void*)zstd->buffer; + zstd->in.pos = 0; + zstd->in.size = (size_t)read; + } + + total_in_before = zstd->in.pos; + total_out_before = zstd->out.pos; + + result = ZSTD_decompressStream(zstd->zdstream, &zstd->out, &zstd->in); + + if (ZSTD_isError(result)) { + zstd->error = (int32_t)result; + return MZ_DATA_ERROR; + } + + total_in_after = zstd->in.pos; + total_out_after = zstd->out.pos; + if ((zstd->max_total_out != -1) && (int64_t)total_out_after > zstd->max_total_out) + total_out_after = (uint64_t)zstd->max_total_out; + + in_bytes = (int32_t)(total_in_after - total_in_before); + out_bytes = (int32_t)(total_out_after - total_out_before); + + total_in += in_bytes; + total_out += out_bytes; + + zstd->total_in += in_bytes; + zstd->total_out += out_bytes; + + } while ((zstd->in.size > 0 || out_bytes > 0) && (zstd->out.pos < zstd->out.size)); + + return total_out; +#endif +} + +#ifndef MZ_ZIP_NO_COMPRESSION +static int32_t mz_stream_zstd_flush(void *stream) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + if (mz_stream_write(zstd->stream.base, zstd->buffer, zstd->buffer_len) != zstd->buffer_len) + return MZ_WRITE_ERROR; + return MZ_OK; +} + +static int32_t mz_stream_zstd_compress(void *stream, ZSTD_EndDirective flush) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + uint64_t total_out_before = 0; + uint64_t total_out_after = 0; + int32_t out_bytes = 0; + size_t result = 0; + int32_t err = 0; + + do { + if (zstd->out.pos == zstd->out.size) { + err = mz_stream_zstd_flush(zstd); + if (err != MZ_OK) + return err; + + zstd->out.dst = zstd->buffer; + zstd->out.size = sizeof(zstd->buffer); + zstd->out.pos = 0; + + zstd->buffer_len = 0; + } + + total_out_before = zstd->out.pos; + + result = ZSTD_compressStream2(zstd->zcstream, &zstd->out, &zstd->in, flush); + + total_out_after = zstd->out.pos; + + out_bytes = (uint32_t)(total_out_after - total_out_before); + + zstd->buffer_len += out_bytes; + zstd->total_out += out_bytes; + + if (ZSTD_isError(result)) { + zstd->error = (int32_t)result; + return MZ_DATA_ERROR; + } + } while ((zstd->in.pos < zstd->in.size) || (flush == ZSTD_e_end && result != 0)); + + return MZ_OK; +} +#endif + +int32_t mz_stream_zstd_write(void *stream, const void *buf, int32_t size) { +#ifdef MZ_ZIP_NO_COMPRESSION + MZ_UNUSED(stream); + MZ_UNUSED(buf); + MZ_UNUSED(size); + return MZ_SUPPORT_ERROR; +#else + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + int32_t err = 
MZ_OK; + + zstd->in.src = buf; + zstd->in.pos = 0; + zstd->in.size = size; + + err = mz_stream_zstd_compress(stream, ZSTD_e_continue); + if (err != MZ_OK) { + return err; + } + + zstd->total_in += size; + return size; +#endif +} + +int64_t mz_stream_zstd_tell(void *stream) { + MZ_UNUSED(stream); + + return MZ_TELL_ERROR; +} + +int32_t mz_stream_zstd_seek(void *stream, int64_t offset, int32_t origin) { + MZ_UNUSED(stream); + MZ_UNUSED(offset); + MZ_UNUSED(origin); + + return MZ_SEEK_ERROR; +} + +int32_t mz_stream_zstd_close(void *stream) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + + if (zstd->mode & MZ_OPEN_MODE_WRITE) { +#ifdef MZ_ZIP_NO_COMPRESSION + return MZ_SUPPORT_ERROR; +#else + mz_stream_zstd_compress(stream, ZSTD_e_end); + mz_stream_zstd_flush(stream); + + ZSTD_freeCStream(zstd->zcstream); + zstd->zcstream = NULL; +#endif + } else if (zstd->mode & MZ_OPEN_MODE_READ) { +#ifdef MZ_ZIP_NO_DECOMPRESSION + return MZ_SUPPORT_ERROR; +#else + ZSTD_freeDStream(zstd->zdstream); + zstd->zdstream = NULL; +#endif + } + zstd->initialized = 0; + return MZ_OK; +} + +int32_t mz_stream_zstd_error(void *stream) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + return zstd->error; +} + +int32_t mz_stream_zstd_get_prop_int64(void *stream, int32_t prop, int64_t *value) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + switch (prop) { + case MZ_STREAM_PROP_TOTAL_IN: + *value = zstd->total_in; + break; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + *value = zstd->max_total_in; + break; + case MZ_STREAM_PROP_TOTAL_OUT: + *value = zstd->total_out; + break; + case MZ_STREAM_PROP_TOTAL_OUT_MAX: + *value = zstd->max_total_out; + break; + case MZ_STREAM_PROP_HEADER_SIZE: + *value = 0; + break; + default: + return MZ_EXIST_ERROR; + } + return MZ_OK; +} + +int32_t mz_stream_zstd_set_prop_int64(void *stream, int32_t prop, int64_t value) { + mz_stream_zstd *zstd = (mz_stream_zstd *)stream; + switch (prop) { + case MZ_STREAM_PROP_COMPRESS_LEVEL: + if (value < 0) + zstd->preset = 6; + else + zstd->preset = (int16_t)value; + return MZ_OK; + case MZ_STREAM_PROP_TOTAL_IN_MAX: + zstd->max_total_in = value; + return MZ_OK; + } + return MZ_EXIST_ERROR; +} + +void *mz_stream_zstd_create(void **stream) { + mz_stream_zstd *zstd = NULL; + zstd = (mz_stream_zstd *)MZ_ALLOC(sizeof(mz_stream_zstd)); + if (zstd != NULL) { + memset(zstd, 0, sizeof(mz_stream_zstd)); + zstd->stream.vtbl = &mz_stream_zstd_vtbl; + zstd->max_total_out = -1; + } + if (stream != NULL) + *stream = zstd; + return zstd; +} + +void mz_stream_zstd_delete(void **stream) { + mz_stream_zstd *zstd = NULL; + if (stream == NULL) + return; + zstd = (mz_stream_zstd *)*stream; + if (zstd != NULL) + MZ_FREE(zstd); + *stream = NULL; +} + +void *mz_stream_zstd_get_interface(void) { + return (void *)&mz_stream_zstd_vtbl; +} diff --git a/Externals/minizip/mz_strm_zstd.h b/Externals/minizip/mz_strm_zstd.h new file mode 100644 index 000000000000..35870835485e --- /dev/null +++ b/Externals/minizip/mz_strm_zstd.h @@ -0,0 +1,44 @@ +/* mz_strm_zlib.h -- Stream for zlib inflate/deflate + Version 2.9.2, February 12, 2020 + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_STREAM_ZSTD_H +#define MZ_STREAM_ZSTD_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +int32_t mz_stream_zstd_open(void *stream, const char *filename, int32_t mode); +int32_t mz_stream_zstd_is_open(void *stream); +int32_t mz_stream_zstd_read(void *stream, void *buf, int32_t size); +int32_t mz_stream_zstd_write(void *stream, const void *buf, int32_t size); +int64_t mz_stream_zstd_tell(void *stream); +int32_t mz_stream_zstd_seek(void *stream, int64_t offset, int32_t origin); +int32_t mz_stream_zstd_close(void *stream); +int32_t mz_stream_zstd_error(void *stream); + +int32_t mz_stream_zstd_get_prop_int64(void *stream, int32_t prop, int64_t *value); +int32_t mz_stream_zstd_set_prop_int64(void *stream, int32_t prop, int64_t value); + +void* mz_stream_zstd_create(void **stream); +void mz_stream_zstd_delete(void **stream); + +void* mz_stream_zstd_get_interface(void); + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/Externals/minizip/mz_zip.c b/Externals/minizip/mz_zip.c new file mode 100644 index 000000000000..cc4f57d0b38c --- /dev/null +++ b/Externals/minizip/mz_zip.c @@ -0,0 +1,2771 @@ +/* zip.c -- Zip manipulation + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + Copyright (C) 2009-2010 Mathias Svensson + Modifications for Zip64 support + http://result42.com + Copyright (C) 2007-2008 Even Rouault + Modifications of Unzip for Zip64 + Copyright (C) 1998-2010 Gilles Vollant + https://www.winimage.com/zLibDll/minizip.html + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + + +#include "mz.h" +#include "mz_crypt.h" +#include "mz_strm.h" +#ifdef HAVE_BZIP2 +# include "mz_strm_bzip.h" +#endif +#ifdef HAVE_LIBCOMP +# include "mz_strm_libcomp.h" +#endif +#ifdef HAVE_LZMA +# include "mz_strm_lzma.h" +#endif +#include "mz_strm_mem.h" +#ifdef HAVE_PKCRYPT +# include "mz_strm_pkcrypt.h" +#endif +#ifdef HAVE_WZAES +# include "mz_strm_wzaes.h" +#endif +#ifdef HAVE_ZLIB +# include "mz_strm_zlib.h" +#endif +#ifdef HAVE_ZSTD +# include "mz_strm_zstd.h" +#endif + +#include "mz_zip.h" + +#include /* tolower */ +#include /* snprintf */ + +#if defined(_MSC_VER) || defined(__MINGW32__) +# define localtime_r(t1,t2) (localtime_s(t2,t1) == 0 ? 
t1 : NULL) +#endif +#if defined(_MSC_VER) && (_MSC_VER < 1900) +# define snprintf _snprintf +#endif + +/***************************************************************************/ + +#define MZ_ZIP_MAGIC_LOCALHEADER (0x04034b50) +#define MZ_ZIP_MAGIC_LOCALHEADERU8 { 0x50, 0x4b, 0x03, 0x04 } +#define MZ_ZIP_MAGIC_CENTRALHEADER (0x02014b50) +#define MZ_ZIP_MAGIC_CENTRALHEADERU8 { 0x50, 0x4b, 0x01, 0x02 } +#define MZ_ZIP_MAGIC_ENDHEADER (0x06054b50) +#define MZ_ZIP_MAGIC_ENDHEADERU8 { 0x50, 0x4b, 0x05, 0x06 } +#define MZ_ZIP_MAGIC_ENDHEADER64 (0x06064b50) +#define MZ_ZIP_MAGIC_ENDLOCHEADER64 (0x07064b50) +#define MZ_ZIP_MAGIC_DATADESCRIPTOR (0x08074b50) +#define MZ_ZIP_MAGIC_DATADESCRIPTORU8 { 0x50, 0x4b, 0x07, 0x08 } + +#define MZ_ZIP_SIZE_LD_ITEM (30) +#define MZ_ZIP_SIZE_CD_ITEM (46) +#define MZ_ZIP_SIZE_CD_LOCATOR64 (20) +#define MZ_ZIP_SIZE_MAX_DATA_DESCRIPTOR (24) + +#define MZ_ZIP_OFFSET_CRC_SIZES (14) +#define MZ_ZIP_UNCOMPR_SIZE64_CUSHION (2 * 1024 * 1024) + +#ifndef MZ_ZIP_EOCD_MAX_BACK +#define MZ_ZIP_EOCD_MAX_BACK (1 << 20) +#endif + +/***************************************************************************/ + +typedef struct mz_zip_s { + mz_zip_file file_info; + mz_zip_file local_file_info; + + void *stream; /* main stream */ + void *cd_stream; /* pointer to the stream with the cd */ + void *cd_mem_stream; /* memory stream for central directory */ + void *compress_stream; /* compression stream */ + void *crypt_stream; /* encryption stream */ + void *file_info_stream; /* memory stream for storing file info */ + void *local_file_info_stream; /* memory stream for storing local file info */ + + int32_t open_mode; + uint8_t recover; + uint8_t data_descriptor; + + uint32_t disk_number_with_cd; /* number of the disk with the central dir */ + int64_t disk_offset_shift; /* correction for zips that have wrong offset start of cd */ + + int64_t cd_start_pos; /* pos of the first file in the central dir stream */ + int64_t cd_current_pos; /* pos of the current file in the central dir */ + int64_t cd_offset; /* offset of start of central directory */ + int64_t cd_size; /* size of the central directory */ + uint32_t cd_signature; /* signature of central directory */ + + uint8_t entry_scanned; /* entry header information read ok */ + uint8_t entry_opened; /* entry is open for read/write */ + uint8_t entry_raw; /* entry opened with raw mode */ + uint32_t entry_crc32; /* entry crc32 */ + + uint64_t number_entry; + + uint16_t version_madeby; + char *comment; +} mz_zip; + +/***************************************************************************/ + +#if 0 +# define mz_zip_print printf +#else +# define mz_zip_print(fmt,...) 
+#endif + +/***************************************************************************/ + +/* Locate the end of central directory */ +static int32_t mz_zip_search_eocd(void *stream, int64_t *central_pos) { + int64_t file_size = 0; + int64_t max_back = MZ_ZIP_EOCD_MAX_BACK; + uint8_t find[4] = MZ_ZIP_MAGIC_ENDHEADERU8; + int32_t err = MZ_OK; + + err = mz_stream_seek(stream, 0, MZ_SEEK_END); + if (err != MZ_OK) + return err; + + file_size = mz_stream_tell(stream); + + if (max_back <= 0 || max_back > file_size) + max_back = file_size; + + return mz_stream_find_reverse(stream, (const void *)find, sizeof(find), max_back, central_pos); +} + +/* Locate the end of central directory 64 of a zip file */ +static int32_t mz_zip_search_zip64_eocd(void *stream, const int64_t end_central_offset, int64_t *central_pos) { + int64_t offset = 0; + uint32_t value32 = 0; + int32_t err = MZ_OK; + + + *central_pos = 0; + + /* Zip64 end of central directory locator */ + err = mz_stream_seek(stream, end_central_offset - MZ_ZIP_SIZE_CD_LOCATOR64, MZ_SEEK_SET); + /* Read locator signature */ + if (err == MZ_OK) { + err = mz_stream_read_uint32(stream, &value32); + if (value32 != MZ_ZIP_MAGIC_ENDLOCHEADER64) + err = MZ_FORMAT_ERROR; + } + /* Number of the disk with the start of the zip64 end of central directory */ + if (err == MZ_OK) + err = mz_stream_read_uint32(stream, &value32); + /* Relative offset of the zip64 end of central directory record8 */ + if (err == MZ_OK) + err = mz_stream_read_uint64(stream, (uint64_t *)&offset); + /* Total number of disks */ + if (err == MZ_OK) + err = mz_stream_read_uint32(stream, &value32); + /* Goto end of central directory record */ + if (err == MZ_OK) + err = mz_stream_seek(stream, (int64_t)offset, MZ_SEEK_SET); + /* The signature */ + if (err == MZ_OK) { + err = mz_stream_read_uint32(stream, &value32); + if (value32 != MZ_ZIP_MAGIC_ENDHEADER64) + err = MZ_FORMAT_ERROR; + } + + if (err == MZ_OK) + *central_pos = offset; + + return err; +} + +/* Get PKWARE traditional encryption verifier */ +static uint16_t mz_zip_get_pk_verify(uint32_t dos_date, uint64_t crc, uint16_t flag) +{ + /* Info-ZIP modification to ZipCrypto format: if bit 3 of the general + * purpose bit flag is set, it uses high byte of 16-bit File Time. 
*/ + if (flag & MZ_ZIP_FLAG_DATA_DESCRIPTOR) + return ((dos_date >> 16) & 0xff) << 8 | ((dos_date >> 8) & 0xff); + return ((crc >> 16) & 0xff) << 8 | ((crc >> 24) & 0xff); +} + +/* Get info about the current file in the zip file */ +static int32_t mz_zip_entry_read_header(void *stream, uint8_t local, mz_zip_file *file_info, void *file_extra_stream) { + uint64_t ntfs_time = 0; + uint32_t reserved = 0; + uint32_t magic = 0; + uint32_t dos_date = 0; + uint32_t field_pos = 0; + uint16_t field_type = 0; + uint16_t field_length = 0; + uint32_t field_length_read = 0; + uint16_t ntfs_attrib_id = 0; + uint16_t ntfs_attrib_size = 0; + uint16_t linkname_size; + uint16_t value16 = 0; + uint32_t value32 = 0; + int64_t extrafield_pos = 0; + int64_t comment_pos = 0; + int64_t linkname_pos = 0; + int64_t saved_pos = 0; + int32_t err = MZ_OK; + char *linkname = NULL; + + + memset(file_info, 0, sizeof(mz_zip_file)); + + /* Check the magic */ + err = mz_stream_read_uint32(stream, &magic); + if (err == MZ_END_OF_STREAM) + err = MZ_END_OF_LIST; + else if (magic == MZ_ZIP_MAGIC_ENDHEADER || magic == MZ_ZIP_MAGIC_ENDHEADER64) + err = MZ_END_OF_LIST; + else if ((local) && (magic != MZ_ZIP_MAGIC_LOCALHEADER)) + err = MZ_FORMAT_ERROR; + else if ((!local) && (magic != MZ_ZIP_MAGIC_CENTRALHEADER)) + err = MZ_FORMAT_ERROR; + + /* Read header fields */ + if (err == MZ_OK) { + if (!local) + err = mz_stream_read_uint16(stream, &file_info->version_madeby); + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->version_needed); + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->flag); + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->compression_method); + if (err == MZ_OK) { + err = mz_stream_read_uint32(stream, &dos_date); + file_info->modified_date = mz_zip_dosdate_to_time_t(dos_date); + } + if (err == MZ_OK) + err = mz_stream_read_uint32(stream, &file_info->crc); +#ifdef HAVE_PKCRYPT + if (err == MZ_OK && file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) { + /* Use dos_date from header instead of derived from time in zip extensions */ + file_info->pk_verify = mz_zip_get_pk_verify(dos_date, file_info->crc, file_info->flag); + } +#endif + if (err == MZ_OK) { + err = mz_stream_read_uint32(stream, &value32); + file_info->compressed_size = value32; + } + if (err == MZ_OK) { + err = mz_stream_read_uint32(stream, &value32); + file_info->uncompressed_size = value32; + } + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->filename_size); + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->extrafield_size); + if (!local) { + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->comment_size); + if (err == MZ_OK) { + err = mz_stream_read_uint16(stream, &value16); + file_info->disk_number = value16; + } + if (err == MZ_OK) + err = mz_stream_read_uint16(stream, &file_info->internal_fa); + if (err == MZ_OK) + err = mz_stream_read_uint32(stream, &file_info->external_fa); + if (err == MZ_OK) { + err = mz_stream_read_uint32(stream, &value32); + file_info->disk_offset = value32; + } + } + } + + if (err == MZ_OK) + err = mz_stream_seek(file_extra_stream, 0, MZ_SEEK_SET); + + /* Copy variable length data to memory stream for later retrieval */ + if ((err == MZ_OK) && (file_info->filename_size > 0)) + err = mz_stream_copy(file_extra_stream, stream, file_info->filename_size); + mz_stream_write_uint8(file_extra_stream, 0); + extrafield_pos = mz_stream_tell(file_extra_stream); + + if ((err == MZ_OK) && (file_info->extrafield_size > 0)) + err = 
mz_stream_copy(file_extra_stream, stream, file_info->extrafield_size); + mz_stream_write_uint8(file_extra_stream, 0); + + comment_pos = mz_stream_tell(file_extra_stream); + if ((err == MZ_OK) && (file_info->comment_size > 0)) + err = mz_stream_copy(file_extra_stream, stream, file_info->comment_size); + mz_stream_write_uint8(file_extra_stream, 0); + + linkname_pos = mz_stream_tell(file_extra_stream); + /* Overwrite if we encounter UNIX1 extra block */ + mz_stream_write_uint8(file_extra_stream, 0); + + if ((err == MZ_OK) && (file_info->extrafield_size > 0)) { + /* Seek to and parse the extra field */ + err = mz_stream_seek(file_extra_stream, extrafield_pos, MZ_SEEK_SET); + + while ((err == MZ_OK) && (field_pos + 4 <= file_info->extrafield_size)) { + err = mz_zip_extrafield_read(file_extra_stream, &field_type, &field_length); + if (err != MZ_OK) + break; + field_pos += 4; + + /* Don't allow field length to exceed size of remaining extrafield */ + if (field_length > (file_info->extrafield_size - field_pos)) + field_length = (uint16_t)(file_info->extrafield_size - field_pos); + + /* Read ZIP64 extra field */ + if ((field_type == MZ_ZIP_EXTENSION_ZIP64) && (field_length >= 8)) { + if ((err == MZ_OK) && (file_info->uncompressed_size == UINT32_MAX)) { + err = mz_stream_read_int64(file_extra_stream, &file_info->uncompressed_size); + if (file_info->uncompressed_size < 0) + err = MZ_FORMAT_ERROR; + } + if ((err == MZ_OK) && (file_info->compressed_size == UINT32_MAX)) { + err = mz_stream_read_int64(file_extra_stream, &file_info->compressed_size); + if (file_info->compressed_size < 0) + err = MZ_FORMAT_ERROR; + } + if ((err == MZ_OK) && (file_info->disk_offset == UINT32_MAX)) { + err = mz_stream_read_int64(file_extra_stream, &file_info->disk_offset); + if (file_info->disk_offset < 0) + err = MZ_FORMAT_ERROR; + } + if ((err == MZ_OK) && (file_info->disk_number == UINT16_MAX)) + err = mz_stream_read_uint32(file_extra_stream, &file_info->disk_number); + } + /* Read NTFS extra field */ + else if ((field_type == MZ_ZIP_EXTENSION_NTFS) && (field_length > 4)) { + if (err == MZ_OK) + err = mz_stream_read_uint32(file_extra_stream, &reserved); + field_length_read = 4; + + while ((err == MZ_OK) && (field_length_read + 4 <= field_length)) { + err = mz_stream_read_uint16(file_extra_stream, &ntfs_attrib_id); + if (err == MZ_OK) + err = mz_stream_read_uint16(file_extra_stream, &ntfs_attrib_size); + field_length_read += 4; + + if ((err == MZ_OK) && (ntfs_attrib_id == 0x01) && (ntfs_attrib_size == 24)) { + err = mz_stream_read_uint64(file_extra_stream, &ntfs_time); + mz_zip_ntfs_to_unix_time(ntfs_time, &file_info->modified_date); + + if (err == MZ_OK) { + err = mz_stream_read_uint64(file_extra_stream, &ntfs_time); + mz_zip_ntfs_to_unix_time(ntfs_time, &file_info->accessed_date); + } + if (err == MZ_OK) { + err = mz_stream_read_uint64(file_extra_stream, &ntfs_time); + mz_zip_ntfs_to_unix_time(ntfs_time, &file_info->creation_date); + } + } else if ((err == MZ_OK) && (field_length_read + ntfs_attrib_size <= field_length)) { + err = mz_stream_seek(file_extra_stream, ntfs_attrib_size, MZ_SEEK_CUR); + } + + field_length_read += ntfs_attrib_size; + } + } + /* Read UNIX1 extra field */ + else if ((field_type == MZ_ZIP_EXTENSION_UNIX1) && (field_length >= 12)) { + if (err == MZ_OK) { + err = mz_stream_read_uint32(file_extra_stream, &value32); + if (err == MZ_OK && file_info->accessed_date == 0) + file_info->accessed_date = value32; + } + if (err == MZ_OK) { + err = mz_stream_read_uint32(file_extra_stream, &value32); + if (err 
== MZ_OK && file_info->modified_date == 0) + file_info->modified_date = value32; + } + if (err == MZ_OK) + err = mz_stream_read_uint16(file_extra_stream, &value16); /* User id */ + if (err == MZ_OK) + err = mz_stream_read_uint16(file_extra_stream, &value16); /* Group id */ + + /* Copy linkname to end of file extra stream so we can return null + terminated string */ + linkname_size = field_length - 12; + if ((err == MZ_OK) && (linkname_size > 0)) { + linkname = (char *)MZ_ALLOC(linkname_size); + if (linkname != NULL) { + if (mz_stream_read(file_extra_stream, linkname, linkname_size) != linkname_size) + err = MZ_READ_ERROR; + if (err == MZ_OK) { + saved_pos = mz_stream_tell(file_extra_stream); + + mz_stream_seek(file_extra_stream, linkname_pos, MZ_SEEK_SET); + mz_stream_write(file_extra_stream, linkname, linkname_size); + mz_stream_write_uint8(file_extra_stream, 0); + + mz_stream_seek(file_extra_stream, saved_pos, MZ_SEEK_SET); + } + MZ_FREE(linkname); + } + } + } +#ifdef HAVE_WZAES + /* Read AES extra field */ + else if ((field_type == MZ_ZIP_EXTENSION_AES) && (field_length == 7)) { + uint8_t value8 = 0; + /* Verify version info */ + err = mz_stream_read_uint16(file_extra_stream, &value16); + /* Support AE-1 and AE-2 */ + if (value16 != 1 && value16 != 2) + err = MZ_FORMAT_ERROR; + file_info->aes_version = value16; + if (err == MZ_OK) + err = mz_stream_read_uint8(file_extra_stream, &value8); + if ((char)value8 != 'A') + err = MZ_FORMAT_ERROR; + if (err == MZ_OK) + err = mz_stream_read_uint8(file_extra_stream, &value8); + if ((char)value8 != 'E') + err = MZ_FORMAT_ERROR; + /* Get AES encryption strength and actual compression method */ + if (err == MZ_OK) { + err = mz_stream_read_uint8(file_extra_stream, &value8); + file_info->aes_encryption_mode = value8; + } + if (err == MZ_OK) { + err = mz_stream_read_uint16(file_extra_stream, &value16); + file_info->compression_method = value16; + } + } +#endif + else if (field_length > 0) { + err = mz_stream_seek(file_extra_stream, field_length, MZ_SEEK_CUR); + } + + field_pos += field_length; + } + } + + /* Get pointers to variable length data */ + mz_stream_mem_get_buffer(file_extra_stream, (const void **)&file_info->filename); + mz_stream_mem_get_buffer_at(file_extra_stream, extrafield_pos, (const void **)&file_info->extrafield); + mz_stream_mem_get_buffer_at(file_extra_stream, comment_pos, (const void **)&file_info->comment); + mz_stream_mem_get_buffer_at(file_extra_stream, linkname_pos, (const void **)&file_info->linkname); + + /* Set to empty string just in-case */ + if (file_info->filename == NULL) + file_info->filename = ""; + if (file_info->extrafield == NULL) + file_info->extrafield_size = 0; + if (file_info->comment == NULL) + file_info->comment = ""; + if (file_info->linkname == NULL) + file_info->linkname = ""; + + if (err == MZ_OK) { + mz_zip_print("Zip - Entry - Read header - %s (local %" PRId8 ")\n", + file_info->filename, local); + mz_zip_print("Zip - Entry - Read header compress (ucs %" PRId64 " cs %" PRId64 " crc 0x%08" PRIx32 ")\n", + file_info->uncompressed_size, file_info->compressed_size, file_info->crc); + if (!local) { + mz_zip_print("Zip - Entry - Read header disk (disk %" PRIu32 " offset %" PRId64 ")\n", + file_info->disk_number, file_info->disk_offset); + } + mz_zip_print("Zip - Entry - Read header variable (fnl %" PRId32 " efs %" PRId32 " cms %" PRId32 ")\n", + file_info->filename_size, file_info->extrafield_size, file_info->comment_size); + } + + return err; +} + +static int32_t mz_zip_entry_read_descriptor(void *stream, 
uint8_t zip64, uint32_t *crc32, int64_t *compressed_size, int64_t *uncompressed_size) { + uint32_t value32 = 0; + int64_t value64 = 0; + int32_t err = MZ_OK; + + + err = mz_stream_read_uint32(stream, &value32); + if (value32 != MZ_ZIP_MAGIC_DATADESCRIPTOR) + err = MZ_FORMAT_ERROR; + if (err == MZ_OK) + err = mz_stream_read_uint32(stream, &value32); + if ((err == MZ_OK) && (crc32 != NULL)) + *crc32 = value32; + if (err == MZ_OK) { + /* If zip 64 extension is enabled then read as 8 byte */ + if (!zip64) { + err = mz_stream_read_uint32(stream, &value32); + value64 = value32; + } else { + err = mz_stream_read_int64(stream, &value64); + if (value64 < 0) + err = MZ_FORMAT_ERROR; + } + if ((err == MZ_OK) && (compressed_size != NULL)) + *compressed_size = value64; + } + if (err == MZ_OK) { + if (!zip64) { + err = mz_stream_read_uint32(stream, &value32); + value64 = value32; + } else { + err = mz_stream_read_int64(stream, &value64); + if (value64 < 0) + err = MZ_FORMAT_ERROR; + } + if ((err == MZ_OK) && (uncompressed_size != NULL)) + *uncompressed_size = value64; + } + + return err; +} + +static int32_t mz_zip_entry_write_crc_sizes(void *stream, uint8_t zip64, uint8_t mask, mz_zip_file *file_info) { + int32_t err = MZ_OK; + + if (mask) + err = mz_stream_write_uint32(stream, 0); + else + err = mz_stream_write_uint32(stream, file_info->crc); /* crc */ + + /* For backwards-compatibility with older zip applications we set all sizes to UINT32_MAX + * when zip64 is needed, instead of only setting sizes larger than UINT32_MAX. */ + + if (err == MZ_OK) { + if (zip64) /* compr size */ + err = mz_stream_write_uint32(stream, UINT32_MAX); + else + err = mz_stream_write_uint32(stream, (uint32_t)file_info->compressed_size); + } + if (err == MZ_OK) { + if (mask) /* uncompr size */ + err = mz_stream_write_uint32(stream, 0); + else if (zip64) + err = mz_stream_write_uint32(stream, UINT32_MAX); + else + err = mz_stream_write_uint32(stream, (uint32_t)file_info->uncompressed_size); + } + return err; +} + +static int32_t mz_zip_entry_needs_zip64(mz_zip_file *file_info, uint8_t local, uint8_t *zip64) { + uint32_t max_uncompressed_size = UINT32_MAX; + uint8_t needs_zip64 = 0; + + if (zip64 == NULL) + return MZ_PARAM_ERROR; + + *zip64 = 0; + + if (local) { + /* At local header we might not know yet whether compressed size will overflow unsigned + 32-bit integer which might happen for high entropy data so we give it some cushion */ + + max_uncompressed_size -= MZ_ZIP_UNCOMPR_SIZE64_CUSHION; + } + + needs_zip64 = (file_info->uncompressed_size >= max_uncompressed_size) || + (file_info->compressed_size >= UINT32_MAX); + + if (!local) { + /* Disk offset and number only used in central directory header */ + needs_zip64 |= (file_info->disk_offset >= UINT32_MAX) || + (file_info->disk_number >= UINT16_MAX); + } + + if (file_info->zip64 == MZ_ZIP64_AUTO) { + /* If uncompressed size is unknown, assume zip64 for 64-bit data descriptors */ + if (local && file_info->uncompressed_size == 0) { + /* Don't use zip64 for local header directory entries */ + if (mz_zip_attrib_is_dir(file_info->external_fa, file_info->version_madeby) != MZ_OK) { + *zip64 = 1; + } + } + *zip64 |= needs_zip64; + } else if (file_info->zip64 == MZ_ZIP64_FORCE) { + *zip64 = 1; + } else if (file_info->zip64 == MZ_ZIP64_DISABLE) { + /* Zip64 extension is required to zip file */ + if (needs_zip64) + return MZ_PARAM_ERROR; + } + + return MZ_OK; +} + +static int32_t mz_zip_entry_write_header(void *stream, uint8_t local, mz_zip_file *file_info) { + uint64_t ntfs_time = 
0; + uint32_t reserved = 0; + uint32_t dos_date = 0; + uint16_t extrafield_size = 0; + uint16_t field_type = 0; + uint16_t field_length = 0; + uint16_t field_length_zip64 = 0; + uint16_t field_length_ntfs = 0; + uint16_t field_length_aes = 0; + uint16_t field_length_unix1 = 0; + uint16_t filename_size = 0; + uint16_t filename_length = 0; + uint16_t linkname_size = 0; + uint16_t version_needed = 0; + int32_t comment_size = 0; + int32_t err = MZ_OK; + int32_t err_mem = MZ_OK; + uint8_t zip64 = 0; + uint8_t skip_aes = 0; + uint8_t mask = 0; + uint8_t write_end_slash = 0; + const char *filename = NULL; + char masked_name[64]; + void *file_extra_stream = NULL; + + if (file_info == NULL) + return MZ_PARAM_ERROR; + + if ((local) && (file_info->flag & MZ_ZIP_FLAG_MASK_LOCAL_INFO)) + mask = 1; + + /* Determine if zip64 extra field is necessary */ + err = mz_zip_entry_needs_zip64(file_info, local, &zip64); + if (err != MZ_OK) + return err; + + /* Start calculating extra field sizes */ + if (zip64) { + /* Both compressed and uncompressed sizes must be included (at least in local header) */ + field_length_zip64 = 8 + 8; + if ((!local) && (file_info->disk_offset >= UINT32_MAX)) + field_length_zip64 += 8; + + extrafield_size += 4; + extrafield_size += field_length_zip64; + } + + /* Calculate extra field size and check for duplicates */ + if (file_info->extrafield_size > 0) { + mz_stream_mem_create(&file_extra_stream); + mz_stream_mem_set_buffer(file_extra_stream, (void *)file_info->extrafield, + file_info->extrafield_size); + + do { + err_mem = mz_stream_read_uint16(file_extra_stream, &field_type); + if (err_mem == MZ_OK) + err_mem = mz_stream_read_uint16(file_extra_stream, &field_length); + if (err_mem != MZ_OK) + break; + + /* Prefer incoming aes extensions over ours */ + if (field_type == MZ_ZIP_EXTENSION_AES) + skip_aes = 1; + + /* Prefer our zip64, ntfs, unix1 extension over incoming */ + if (field_type != MZ_ZIP_EXTENSION_ZIP64 && field_type != MZ_ZIP_EXTENSION_NTFS && + field_type != MZ_ZIP_EXTENSION_UNIX1) + extrafield_size += 4 + field_length; + + if (err_mem == MZ_OK) + err_mem = mz_stream_seek(file_extra_stream, field_length, MZ_SEEK_CUR); + } while (err_mem == MZ_OK); + } + +#ifdef HAVE_WZAES + if (!skip_aes) { + if ((file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) && (file_info->aes_version)) { + field_length_aes = 1 + 1 + 1 + 2 + 2; + extrafield_size += 4 + field_length_aes; + } + } +#else + MZ_UNUSED(field_length_aes); + MZ_UNUSED(skip_aes); +#endif + /* NTFS timestamps */ + if ((file_info->modified_date != 0) && + (file_info->accessed_date != 0) && + (file_info->creation_date != 0) && (!mask)) { + field_length_ntfs = 8 + 8 + 8 + 4 + 2 + 2; + extrafield_size += 4 + field_length_ntfs; + } + + /* Unix1 symbolic links */ + if (file_info->linkname != NULL && *file_info->linkname != 0) { + linkname_size = (uint16_t)strlen(file_info->linkname); + field_length_unix1 = 12 + linkname_size; + extrafield_size += 4 + field_length_unix1; + } + + if (local) + err = mz_stream_write_uint32(stream, MZ_ZIP_MAGIC_LOCALHEADER); + else { + err = mz_stream_write_uint32(stream, MZ_ZIP_MAGIC_CENTRALHEADER); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, file_info->version_madeby); + } + + /* Calculate version needed to extract */ + if (err == MZ_OK) { + version_needed = file_info->version_needed; + if (version_needed == 0) { + version_needed = 20; + if (zip64) + version_needed = 45; +#ifdef HAVE_WZAES + if ((file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) && (file_info->aes_version)) + version_needed = 51; +#endif 
+#if defined(HAVE_LZMA) || defined(HAVE_LIBCOMP) + if ((file_info->compression_method == MZ_COMPRESS_METHOD_LZMA) || + (file_info->compression_method == MZ_COMPRESS_METHOD_XZ)) + version_needed = 63; +#endif + } + err = mz_stream_write_uint16(stream, version_needed); + } + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, file_info->flag); + if (err == MZ_OK) { +#ifdef HAVE_WZAES + if ((file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) && (file_info->aes_version)) + err = mz_stream_write_uint16(stream, MZ_COMPRESS_METHOD_AES); + else +#endif + err = mz_stream_write_uint16(stream, file_info->compression_method); + } + if (err == MZ_OK) { + if (file_info->modified_date != 0 && !mask) + dos_date = mz_zip_time_t_to_dos_date(file_info->modified_date); + err = mz_stream_write_uint32(stream, dos_date); + } + + if (err == MZ_OK) + err = mz_zip_entry_write_crc_sizes(stream, zip64, mask, file_info); + + if (mask) { + snprintf(masked_name, sizeof(masked_name), "%" PRIx32 "_%" PRIx64, + file_info->disk_number, file_info->disk_offset); + filename = masked_name; + } else { + filename = file_info->filename; + } + + filename_length = (uint16_t)strlen(filename); + filename_size += filename_length; + + if ((mz_zip_attrib_is_dir(file_info->external_fa, file_info->version_madeby) == MZ_OK) && + ((filename[filename_length - 1] != '/') && (filename[filename_length - 1] != '\\'))) { + filename_size += 1; + write_end_slash = 1; + } + + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, filename_size); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, extrafield_size); + + if (!local) { + if (file_info->comment != NULL) { + comment_size = (int32_t)strlen(file_info->comment); + if (comment_size > UINT16_MAX) + comment_size = UINT16_MAX; + } + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, (uint16_t)comment_size); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, (uint16_t)file_info->disk_number); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, file_info->internal_fa); + if (err == MZ_OK) + err = mz_stream_write_uint32(stream, file_info->external_fa); + if (err == MZ_OK) { + if (file_info->disk_offset >= UINT32_MAX) + err = mz_stream_write_uint32(stream, UINT32_MAX); + else + err = mz_stream_write_uint32(stream, (uint32_t)file_info->disk_offset); + } + } + + if (err == MZ_OK) { + if (mz_stream_write(stream, filename, filename_length) != filename_length) + err = MZ_WRITE_ERROR; + + /* Ensure that directories have a slash appended to them for compatibility */ + if (err == MZ_OK && write_end_slash) + err = mz_stream_write_uint8(stream, '/'); + } + + /* Write ZIP64 extra field first so we can update sizes later if data descriptor not used */ + if ((err == MZ_OK) && (zip64)) { + err = mz_zip_extrafield_write(stream, MZ_ZIP_EXTENSION_ZIP64, field_length_zip64); + if (err == MZ_OK) { + if (mask) + err = mz_stream_write_int64(stream, 0); + else + err = mz_stream_write_int64(stream, file_info->uncompressed_size); + } + if (err == MZ_OK) + err = mz_stream_write_int64(stream, file_info->compressed_size); + if ((err == MZ_OK) && (!local) && (file_info->disk_offset >= UINT32_MAX)) + err = mz_stream_write_int64(stream, file_info->disk_offset); + if ((err == MZ_OK) && (!local) && (file_info->disk_number >= UINT16_MAX)) + err = mz_stream_write_uint32(stream, file_info->disk_number); + } + /* Write NTFS extra field */ + if ((err == MZ_OK) && (field_length_ntfs > 0)) { + err = mz_zip_extrafield_write(stream, MZ_ZIP_EXTENSION_NTFS, field_length_ntfs); + if (err == MZ_OK) + err = 
mz_stream_write_uint32(stream, reserved); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, 0x01); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, field_length_ntfs - 8); + if (err == MZ_OK) { + mz_zip_unix_to_ntfs_time(file_info->modified_date, &ntfs_time); + err = mz_stream_write_uint64(stream, ntfs_time); + } + if (err == MZ_OK) { + mz_zip_unix_to_ntfs_time(file_info->accessed_date, &ntfs_time); + err = mz_stream_write_uint64(stream, ntfs_time); + } + if (err == MZ_OK) { + mz_zip_unix_to_ntfs_time(file_info->creation_date, &ntfs_time); + err = mz_stream_write_uint64(stream, ntfs_time); + } + } + /* Write UNIX extra block extra field */ + if ((err == MZ_OK) && (field_length_unix1 > 0)) { + err = mz_zip_extrafield_write(stream, MZ_ZIP_EXTENSION_UNIX1, field_length_unix1); + if (err == MZ_OK) + err = mz_stream_write_uint32(stream, (uint32_t)file_info->accessed_date); + if (err == MZ_OK) + err = mz_stream_write_uint32(stream, (uint32_t)file_info->modified_date); + if (err == MZ_OK) /* User id */ + err = mz_stream_write_uint16(stream, 0); + if (err == MZ_OK) /* Group id */ + err = mz_stream_write_uint16(stream, 0); + if (err == MZ_OK && linkname_size > 0) { + if (mz_stream_write(stream, file_info->linkname, linkname_size) != linkname_size) + err = MZ_WRITE_ERROR; + } + } +#ifdef HAVE_WZAES + /* Write AES extra field */ + if ((err == MZ_OK) && (!skip_aes) && (file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) && (file_info->aes_version)) { + err = mz_zip_extrafield_write(stream, MZ_ZIP_EXTENSION_AES, field_length_aes); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, file_info->aes_version); + if (err == MZ_OK) + err = mz_stream_write_uint8(stream, 'A'); + if (err == MZ_OK) + err = mz_stream_write_uint8(stream, 'E'); + if (err == MZ_OK) + err = mz_stream_write_uint8(stream, file_info->aes_encryption_mode); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, file_info->compression_method); + } +#endif + + if (file_info->extrafield_size > 0) { + err_mem = mz_stream_mem_seek(file_extra_stream, 0, MZ_SEEK_SET); + while (err == MZ_OK && err_mem == MZ_OK) { + err_mem = mz_stream_read_uint16(file_extra_stream, &field_type); + if (err_mem == MZ_OK) + err_mem = mz_stream_read_uint16(file_extra_stream, &field_length); + if (err_mem != MZ_OK) + break; + + /* Prefer our zip 64, ntfs, unix1 extensions over incoming */ + if (field_type == MZ_ZIP_EXTENSION_ZIP64 || field_type == MZ_ZIP_EXTENSION_NTFS || + field_type == MZ_ZIP_EXTENSION_UNIX1) { + err_mem = mz_stream_seek(file_extra_stream, field_length, MZ_SEEK_CUR); + continue; + } + + err = mz_stream_write_uint16(stream, field_type); + if (err == MZ_OK) + err = mz_stream_write_uint16(stream, field_length); + if (err == MZ_OK) + err = mz_stream_copy(stream, file_extra_stream, field_length); + } + + mz_stream_mem_delete(&file_extra_stream); + } + + if ((err == MZ_OK) && (!local) && (file_info->comment != NULL)) { + if (mz_stream_write(stream, file_info->comment, file_info->comment_size) != file_info->comment_size) + err = MZ_WRITE_ERROR; + } + + return err; +} + +static int32_t mz_zip_entry_write_descriptor(void *stream, uint8_t zip64, uint32_t crc32, int64_t compressed_size, int64_t uncompressed_size) { + int32_t err = MZ_OK; + + err = mz_stream_write_uint32(stream, MZ_ZIP_MAGIC_DATADESCRIPTOR); + if (err == MZ_OK) + err = mz_stream_write_uint32(stream, crc32); + + /* Store data descriptor as 8 bytes if zip 64 extension enabled */ + if (err == MZ_OK) { + /* Zip 64 extension is enabled when uncompressed size is > UINT32_MAX */ + 
if (!zip64) + err = mz_stream_write_uint32(stream, (uint32_t)compressed_size); + else + err = mz_stream_write_int64(stream, compressed_size); + } + if (err == MZ_OK) { + if (!zip64) + err = mz_stream_write_uint32(stream, (uint32_t)uncompressed_size); + else + err = mz_stream_write_int64(stream, uncompressed_size); + } + + return err; +} + +static int32_t mz_zip_read_cd(void *handle) { + mz_zip *zip = (mz_zip *)handle; + uint64_t number_entry_cd64 = 0; + uint64_t number_entry_cd = 0; + int64_t eocd_pos = 0; + int64_t eocd_pos64 = 0; + int64_t value64i = 0; + uint16_t value16 = 0; + uint32_t value32 = 0; + uint64_t value64 = 0; + uint16_t comment_size = 0; + int32_t comment_read = 0; + int32_t err = MZ_OK; + + + if (zip == NULL) + return MZ_PARAM_ERROR; + + /* Read and cache central directory records */ + err = mz_zip_search_eocd(zip->stream, &eocd_pos); + if (err == MZ_OK) { + /* The signature, already checked */ + err = mz_stream_read_uint32(zip->stream, &value32); + /* Number of this disk */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &value16); + /* Number of the disk with the start of the central directory */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &value16); + zip->disk_number_with_cd = value16; + /* Total number of entries in the central dir on this disk */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &value16); + zip->number_entry = value16; + /* Total number of entries in the central dir */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &value16); + number_entry_cd = value16; + if (number_entry_cd != zip->number_entry) + err = MZ_FORMAT_ERROR; + /* Size of the central directory */ + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, &value32); + if (err == MZ_OK) + zip->cd_size = value32; + /* Offset of start of central directory with respect to the starting disk number */ + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, &value32); + if (err == MZ_OK) + zip->cd_offset = value32; + /* Zip file global comment length */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &comment_size); + if ((err == MZ_OK) && (comment_size > 0)) { + zip->comment = (char *)MZ_ALLOC(comment_size + 1); + if (zip->comment != NULL) { + comment_read = mz_stream_read(zip->stream, zip->comment, comment_size); + /* Don't fail if incorrect comment length read, not critical */ + if (comment_read < 0) + comment_read = 0; + zip->comment[comment_read] = 0; + } + } + + if ((err == MZ_OK) && ((number_entry_cd == UINT16_MAX) || (zip->cd_offset == UINT32_MAX))) { + /* Format should be Zip64, as the central directory or file size is too large */ + if (mz_zip_search_zip64_eocd(zip->stream, eocd_pos, &eocd_pos64) == MZ_OK) { + eocd_pos = eocd_pos64; + + err = mz_stream_seek(zip->stream, eocd_pos, MZ_SEEK_SET); + /* The signature, already checked */ + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, &value32); + /* Size of zip64 end of central directory record */ + if (err == MZ_OK) + err = mz_stream_read_uint64(zip->stream, &value64); + /* Version made by */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &zip->version_madeby); + /* Version needed to extract */ + if (err == MZ_OK) + err = mz_stream_read_uint16(zip->stream, &value16); + /* Number of this disk */ + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, &value32); + /* Number of the disk with the start of the central directory */ + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, 
&zip->disk_number_with_cd); + /* Total number of entries in the central directory on this disk */ + if (err == MZ_OK) + err = mz_stream_read_uint64(zip->stream, &zip->number_entry); + /* Total number of entries in the central directory */ + if (err == MZ_OK) + err = mz_stream_read_uint64(zip->stream, &number_entry_cd64); + if (zip->number_entry != number_entry_cd64) + err = MZ_FORMAT_ERROR; + /* Size of the central directory */ + if (err == MZ_OK) { + err = mz_stream_read_int64(zip->stream, &zip->cd_size); + if (zip->cd_size < 0) + err = MZ_FORMAT_ERROR; + } + /* Offset of start of central directory with respect to the starting disk number */ + if (err == MZ_OK) { + err = mz_stream_read_int64(zip->stream, &zip->cd_offset); + if (zip->cd_offset < 0) + err = MZ_FORMAT_ERROR; + } + } else if ((zip->number_entry == UINT16_MAX) || (number_entry_cd != zip->number_entry) || + (zip->cd_size == UINT16_MAX) || (zip->cd_offset == UINT32_MAX)) { + err = MZ_FORMAT_ERROR; + } + } + } + + if (err == MZ_OK) { + mz_zip_print("Zip - Read cd (disk %" PRId32 " entries %" PRId64 " offset %" PRId64 " size %" PRId64 ")\n", + zip->disk_number_with_cd, zip->number_entry, zip->cd_offset, zip->cd_size); + + /* Verify central directory signature exists at offset */ + err = mz_stream_seek(zip->stream, zip->cd_offset, MZ_SEEK_SET); + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, &zip->cd_signature); + if ((err == MZ_OK) && (zip->cd_signature != MZ_ZIP_MAGIC_CENTRALHEADER)) { + /* If cd exists in large file and no zip-64 support, error for recover */ + if (eocd_pos > UINT32_MAX && eocd_pos64 == 0) + err = MZ_FORMAT_ERROR; + /* If cd not found attempt to seek backward to find it */ + if (err == MZ_OK) + err = mz_stream_seek(zip->stream, eocd_pos - zip->cd_size, MZ_SEEK_SET); + if (err == MZ_OK) + err = mz_stream_read_uint32(zip->stream, &zip->cd_signature); + if ((err == MZ_OK) && (zip->cd_signature == MZ_ZIP_MAGIC_CENTRALHEADER)) { + + /* If found compensate for incorrect locations */ + value64i = zip->cd_offset; + zip->cd_offset = eocd_pos - zip->cd_size; + /* Assume disk has prepended data */ + zip->disk_offset_shift = zip->cd_offset - value64i; + } + } + } + + if (err == MZ_OK) { + if (eocd_pos < zip->cd_offset) { + /* End of central dir should always come after central dir */ + err = MZ_FORMAT_ERROR; + } else if ((uint64_t)eocd_pos < (uint64_t)zip->cd_offset + zip->cd_size) { + /* Truncate size of cd if incorrect size or offset provided */ + zip->cd_size = eocd_pos - zip->cd_offset; + } + } + + return err; +} + +static int32_t mz_zip_write_cd(void *handle) { + mz_zip *zip = (mz_zip *)handle; + int64_t zip64_eocd_pos_inzip = 0; + int64_t disk_number = 0; + int64_t disk_size = 0; + int32_t comment_size = 0; + int32_t err = MZ_OK; + + + if (zip == NULL) + return MZ_PARAM_ERROR; + + if (mz_stream_get_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, &disk_number) == MZ_OK) + zip->disk_number_with_cd = (uint32_t)disk_number; + if (mz_stream_get_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_SIZE, &disk_size) == MZ_OK && disk_size > 0) + zip->disk_number_with_cd += 1; + mz_stream_set_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, -1); + if ((zip->disk_number_with_cd > 0) && (zip->open_mode & MZ_OPEN_MODE_APPEND)) { + // Overwrite existing central directory if using split disks + mz_stream_seek(zip->stream, 0, MZ_SEEK_SET); + } + + zip->cd_offset = mz_stream_tell(zip->stream); + mz_stream_seek(zip->cd_mem_stream, 0, MZ_SEEK_END); + zip->cd_size = (uint32_t)mz_stream_tell(zip->cd_mem_stream); + 
mz_stream_seek(zip->cd_mem_stream, 0, MZ_SEEK_SET); + + err = mz_stream_copy(zip->stream, zip->cd_mem_stream, (int32_t)zip->cd_size); + + mz_zip_print("Zip - Write cd (disk %" PRId32 " entries %" PRId64 " offset %" PRId64 " size %" PRId64 ")\n", + zip->disk_number_with_cd, zip->number_entry, zip->cd_offset, zip->cd_size); + + if (zip->cd_size == 0 && zip->number_entry > 0) { + // Zip does not contain central directory, open with recovery option + return MZ_FORMAT_ERROR; + } + + /* Write the ZIP64 central directory header */ + if (zip->cd_offset >= UINT32_MAX || zip->number_entry >= UINT16_MAX) { + zip64_eocd_pos_inzip = mz_stream_tell(zip->stream); + + err = mz_stream_write_uint32(zip->stream, MZ_ZIP_MAGIC_ENDHEADER64); + + /* Size of this 'zip64 end of central directory' */ + if (err == MZ_OK) + err = mz_stream_write_uint64(zip->stream, (uint64_t)44); + /* Version made by */ + if (err == MZ_OK) + err = mz_stream_write_uint16(zip->stream, zip->version_madeby); + /* Version needed */ + if (err == MZ_OK) + err = mz_stream_write_uint16(zip->stream, (uint16_t)45); + /* Number of this disk */ + if (err == MZ_OK) + err = mz_stream_write_uint32(zip->stream, zip->disk_number_with_cd); + /* Number of the disk with the start of the central directory */ + if (err == MZ_OK) + err = mz_stream_write_uint32(zip->stream, zip->disk_number_with_cd); + /* Total number of entries in the central dir on this disk */ + if (err == MZ_OK) + err = mz_stream_write_uint64(zip->stream, zip->number_entry); + /* Total number of entries in the central dir */ + if (err == MZ_OK) + err = mz_stream_write_uint64(zip->stream, zip->number_entry); + /* Size of the central directory */ + if (err == MZ_OK) + err = mz_stream_write_int64(zip->stream, zip->cd_size); + /* Offset of start of central directory with respect to the starting disk number */ + if (err == MZ_OK) + err = mz_stream_write_int64(zip->stream, zip->cd_offset); + if (err == MZ_OK) + err = mz_stream_write_uint32(zip->stream, MZ_ZIP_MAGIC_ENDLOCHEADER64); + + /* Number of the disk with the start of the central directory */ + if (err == MZ_OK) + err = mz_stream_write_uint32(zip->stream, zip->disk_number_with_cd); + /* Relative offset to the end of zip64 central directory */ + if (err == MZ_OK) + err = mz_stream_write_int64(zip->stream, zip64_eocd_pos_inzip); + /* Number of the disk with the start of the central directory */ + if (err == MZ_OK) + err = mz_stream_write_uint32(zip->stream, zip->disk_number_with_cd + 1); + } + + /* Write the central directory header */ + + /* Signature */ + if (err == MZ_OK) + err = mz_stream_write_uint32(zip->stream, MZ_ZIP_MAGIC_ENDHEADER); + /* Number of this disk */ + if (err == MZ_OK) + err = mz_stream_write_uint16(zip->stream, (uint16_t)zip->disk_number_with_cd); + /* Number of the disk with the start of the central directory */ + if (err == MZ_OK) + err = mz_stream_write_uint16(zip->stream, (uint16_t)zip->disk_number_with_cd); + /* Total number of entries in the central dir on this disk */ + if (err == MZ_OK) { + if (zip->number_entry >= UINT16_MAX) + err = mz_stream_write_uint16(zip->stream, UINT16_MAX); + else + err = mz_stream_write_uint16(zip->stream, (uint16_t)zip->number_entry); + } + /* Total number of entries in the central dir */ + if (err == MZ_OK) { + if (zip->number_entry >= UINT16_MAX) + err = mz_stream_write_uint16(zip->stream, UINT16_MAX); + else + err = mz_stream_write_uint16(zip->stream, (uint16_t)zip->number_entry); + } + /* Size of the central directory */ + if (err == MZ_OK) + err = 
mz_stream_write_uint32(zip->stream, (uint32_t)zip->cd_size); + /* Offset of start of central directory with respect to the starting disk number */ + if (err == MZ_OK) { + if (zip->cd_offset >= UINT32_MAX) + err = mz_stream_write_uint32(zip->stream, UINT32_MAX); + else + err = mz_stream_write_uint32(zip->stream, (uint32_t)zip->cd_offset); + } + + /* Write global comment */ + if (zip->comment != NULL) { + comment_size = (int32_t)strlen(zip->comment); + if (comment_size > UINT16_MAX) + comment_size = UINT16_MAX; + } + if (err == MZ_OK) + err = mz_stream_write_uint16(zip->stream, (uint16_t)comment_size); + if (err == MZ_OK) { + if (mz_stream_write(zip->stream, zip->comment, comment_size) != comment_size) + err = MZ_READ_ERROR; + } + return err; +} + +static int32_t mz_zip_recover_cd(void *handle) { + mz_zip *zip = (mz_zip *)handle; + mz_zip_file local_file_info; + void *local_file_info_stream = NULL; + void *cd_mem_stream = NULL; + uint64_t number_entry = 0; + int64_t descriptor_pos = 0; + int64_t next_header_pos = 0; + int64_t disk_offset = 0; + int64_t disk_number = 0; + int64_t compressed_pos = 0; + int64_t compressed_end_pos = 0; + int64_t compressed_size = 0; + int64_t uncompressed_size = 0; + uint8_t descriptor_magic[4] = MZ_ZIP_MAGIC_DATADESCRIPTORU8; + uint8_t local_header_magic[4] = MZ_ZIP_MAGIC_LOCALHEADERU8; + uint8_t central_header_magic[4] = MZ_ZIP_MAGIC_CENTRALHEADERU8; + uint32_t crc32 = 0; + int32_t disk_number_with_cd = 0; + int32_t err = MZ_OK; + uint8_t zip64 = 0; + uint8_t eof = 0; + + + mz_zip_print("Zip - Recover - Start\n"); + + mz_zip_get_cd_mem_stream(handle, &cd_mem_stream); + + /* Determine if we are on a split disk or not */ + mz_stream_set_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, 0); + if (mz_stream_tell(zip->stream) < 0) { + mz_stream_set_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, -1); + mz_stream_seek(zip->stream, 0, MZ_SEEK_SET); + } else + disk_number_with_cd = 1; + + if (mz_stream_is_open(cd_mem_stream) != MZ_OK) + err = mz_stream_mem_open(cd_mem_stream, NULL, MZ_OPEN_MODE_CREATE); + + mz_stream_mem_create(&local_file_info_stream); + mz_stream_mem_open(local_file_info_stream, NULL, MZ_OPEN_MODE_CREATE); + + if (err == MZ_OK) { + err = mz_stream_find(zip->stream, (const void *)local_header_magic, sizeof(local_header_magic), + INT64_MAX, &next_header_pos); + } + + while (err == MZ_OK && !eof) { + /* Get current offset and disk number for central dir record */ + disk_offset = mz_stream_tell(zip->stream); + mz_stream_get_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, &disk_number); + + /* Read local headers */ + memset(&local_file_info, 0, sizeof(local_file_info)); + err = mz_zip_entry_read_header(zip->stream, 1, &local_file_info, local_file_info_stream); + if (err != MZ_OK) + break; + + local_file_info.disk_offset = disk_offset; + if (disk_number < 0) + disk_number = 0; + local_file_info.disk_number = (uint32_t)disk_number; + + compressed_pos = mz_stream_tell(zip->stream); + + if ((err == MZ_OK) && (local_file_info.compressed_size > 0)) { + mz_stream_seek(zip->stream, local_file_info.compressed_size, MZ_SEEK_CUR); + } + + for (;;) { + /* Search for the next local header */ + err = mz_stream_find(zip->stream, (const void *)local_header_magic, sizeof(local_header_magic), + INT64_MAX, &next_header_pos); + + if (err == MZ_EXIST_ERROR) { + mz_stream_seek(zip->stream, compressed_pos, MZ_SEEK_SET); + + /* Search for central dir if no local header found */ + err = mz_stream_find(zip->stream, (const void *)central_header_magic, 
sizeof(central_header_magic), + INT64_MAX, &next_header_pos); + + if (err == MZ_EXIST_ERROR) { + /* Get end of stream if no central header found */ + mz_stream_seek(zip->stream, 0, MZ_SEEK_END); + next_header_pos = mz_stream_tell(zip->stream); + } + + eof = 1; + } + + if (local_file_info.flag & MZ_ZIP_FLAG_DATA_DESCRIPTOR || local_file_info.compressed_size == 0) { + /* Search backwards for the descriptor, seeking too far back will be incorrect if compressed size is small */ + err = mz_stream_find_reverse(zip->stream, (const void *)descriptor_magic, sizeof(descriptor_magic), + MZ_ZIP_SIZE_MAX_DATA_DESCRIPTOR, &descriptor_pos); + if (err == MZ_OK) { + if (mz_zip_extrafield_contains(local_file_info.extrafield, + local_file_info.extrafield_size, MZ_ZIP_EXTENSION_ZIP64, NULL) == MZ_OK) + zip64 = 1; + + err = mz_zip_entry_read_descriptor(zip->stream, zip64, &crc32, + &compressed_size, &uncompressed_size); + + if (err == MZ_OK) { + if (local_file_info.crc == 0) + local_file_info.crc = crc32; + if (local_file_info.compressed_size == 0) + local_file_info.compressed_size = compressed_size; + if (local_file_info.uncompressed_size == 0) + local_file_info.uncompressed_size = uncompressed_size; + } + + compressed_end_pos = descriptor_pos; + } else if (eof) { + compressed_end_pos = next_header_pos; + } else if (local_file_info.flag & MZ_ZIP_FLAG_DATA_DESCRIPTOR) { + /* Wrong local file entry found, keep searching */ + next_header_pos += 1; + mz_stream_seek(zip->stream, next_header_pos, MZ_SEEK_SET); + continue; + } + } else { + compressed_end_pos = next_header_pos; + } + + break; + } + + compressed_size = compressed_end_pos - compressed_pos; + + if (compressed_size > UINT32_MAX) { + /* Update sizes if 4GB file is written with no ZIP64 support */ + if (local_file_info.uncompressed_size < UINT32_MAX) { + local_file_info.compressed_size = compressed_size; + local_file_info.uncompressed_size = 0; + } + } + + mz_zip_print("Zip - Recover - Entry %s (csize %" PRId64 " usize %" PRId64 " flags 0x%" PRIx16 ")\n", + local_file_info.filename, local_file_info.compressed_size, local_file_info.uncompressed_size, + local_file_info.flag); + + /* Rewrite central dir with local headers and offsets */ + err = mz_zip_entry_write_header(cd_mem_stream, 0, &local_file_info); + if (err == MZ_OK) + number_entry += 1; + + err = mz_stream_seek(zip->stream, next_header_pos, MZ_SEEK_SET); + } + + mz_stream_mem_delete(&local_file_info_stream); + + mz_zip_print("Zip - Recover - Complete (cddisk %" PRId32 " entries %" PRId64 ")\n", + disk_number_with_cd, number_entry); + + if (number_entry == 0) + return err; + + /* Set new upper seek boundary for central dir mem stream */ + disk_offset = mz_stream_tell(cd_mem_stream); + mz_stream_mem_set_buffer_limit(cd_mem_stream, (int32_t)disk_offset); + + /* Set new central directory info */ + mz_zip_set_cd_stream(handle, 0, cd_mem_stream); + mz_zip_set_number_entry(handle, number_entry); + mz_zip_set_disk_number_with_cd(handle, disk_number_with_cd); + + return MZ_OK; +} + +void *mz_zip_create(void **handle) { + mz_zip *zip = NULL; + + zip = (mz_zip *)MZ_ALLOC(sizeof(mz_zip)); + if (zip != NULL) { + memset(zip, 0, sizeof(mz_zip)); + zip->data_descriptor = 1; + } + if (handle != NULL) + *handle = zip; + + return zip; +} + +void mz_zip_delete(void **handle) { + mz_zip *zip = NULL; + if (handle == NULL) + return; + zip = (mz_zip *)*handle; + if (zip != NULL) { + MZ_FREE(zip); + } + *handle = NULL; +} + +int32_t mz_zip_open(void *handle, void *stream, int32_t mode) { + mz_zip *zip = (mz_zip *)handle; + 
int32_t err = MZ_OK; + + + if (zip == NULL) + return MZ_PARAM_ERROR; + + mz_zip_print("Zip - Open\n"); + + zip->stream = stream; + + mz_stream_mem_create(&zip->cd_mem_stream); + + if (mode & MZ_OPEN_MODE_WRITE) { + mz_stream_mem_open(zip->cd_mem_stream, NULL, MZ_OPEN_MODE_CREATE); + zip->cd_stream = zip->cd_mem_stream; + } else { + zip->cd_stream = stream; + } + + if ((mode & MZ_OPEN_MODE_READ) || (mode & MZ_OPEN_MODE_APPEND)) { + if ((mode & MZ_OPEN_MODE_CREATE) == 0) { + err = mz_zip_read_cd(zip); + if (err != MZ_OK) { + mz_zip_print("Zip - Error detected reading cd (%" PRId32 ")\n", err); + if (zip->recover && mz_zip_recover_cd(zip) == MZ_OK) + err = MZ_OK; + } + } + + if ((err == MZ_OK) && (mode & MZ_OPEN_MODE_APPEND)) { + if (zip->cd_size > 0) { + /* Store central directory in memory */ + err = mz_stream_seek(zip->stream, zip->cd_offset, MZ_SEEK_SET); + if (err == MZ_OK) + err = mz_stream_copy(zip->cd_mem_stream, zip->stream, (int32_t)zip->cd_size); + if (err == MZ_OK) + err = mz_stream_seek(zip->stream, zip->cd_offset, MZ_SEEK_SET); + } else { + if (zip->cd_signature == MZ_ZIP_MAGIC_ENDHEADER) { + /* If tiny zip then overwrite end header */ + err = mz_stream_seek(zip->stream, zip->cd_offset, MZ_SEEK_SET); + } else { + /* If no central directory, append new zip to end of file */ + err = mz_stream_seek(zip->stream, 0, MZ_SEEK_END); + } + } + + if (zip->disk_number_with_cd > 0) { + /* Move to last disk to begin appending */ + mz_stream_set_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, zip->disk_number_with_cd - 1); + } + } else { + zip->cd_start_pos = zip->cd_offset; + } + } + + if (err != MZ_OK) { + mz_zip_close(zip); + return err; + } + + /* Memory streams used to store variable length file info data */ + mz_stream_mem_create(&zip->file_info_stream); + mz_stream_mem_open(zip->file_info_stream, NULL, MZ_OPEN_MODE_CREATE); + + mz_stream_mem_create(&zip->local_file_info_stream); + mz_stream_mem_open(zip->local_file_info_stream, NULL, MZ_OPEN_MODE_CREATE); + + zip->open_mode = mode; + + return err; +} + +int32_t mz_zip_close(void *handle) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + mz_zip_print("Zip - Close\n"); + + if (mz_zip_entry_is_open(handle) == MZ_OK) + err = mz_zip_entry_close(handle); + + if ((err == MZ_OK) && (zip->open_mode & MZ_OPEN_MODE_WRITE)) + err = mz_zip_write_cd(handle); + + if (zip->cd_mem_stream != NULL) { + mz_stream_close(zip->cd_mem_stream); + mz_stream_delete(&zip->cd_mem_stream); + } + + if (zip->file_info_stream != NULL) { + mz_stream_mem_close(zip->file_info_stream); + mz_stream_mem_delete(&zip->file_info_stream); + } + if (zip->local_file_info_stream != NULL) { + mz_stream_mem_close(zip->local_file_info_stream); + mz_stream_mem_delete(&zip->local_file_info_stream); + } + + if (zip->comment) { + MZ_FREE(zip->comment); + zip->comment = NULL; + } + + zip->stream = NULL; + zip->cd_stream = NULL; + + return err; +} + +int32_t mz_zip_get_comment(void *handle, const char **comment) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || comment == NULL) + return MZ_PARAM_ERROR; + if (zip->comment == NULL) + return MZ_EXIST_ERROR; + *comment = zip->comment; + return MZ_OK; +} + +int32_t mz_zip_set_comment(void *handle, const char *comment) { + mz_zip *zip = (mz_zip *)handle; + int32_t comment_size = 0; + if (zip == NULL || comment == NULL) + return MZ_PARAM_ERROR; + if (zip->comment != NULL) + MZ_FREE(zip->comment); + comment_size = (int32_t)strlen(comment); + if (comment_size > UINT16_MAX) + 
return MZ_PARAM_ERROR; + zip->comment = (char *)MZ_ALLOC(comment_size+1); + if (zip->comment == NULL) + return MZ_MEM_ERROR; + memset(zip->comment, 0, comment_size+1); + strncpy(zip->comment, comment, comment_size); + return MZ_OK; +} + +int32_t mz_zip_get_version_madeby(void *handle, uint16_t *version_madeby) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || version_madeby == NULL) + return MZ_PARAM_ERROR; + *version_madeby = zip->version_madeby; + return MZ_OK; +} + +int32_t mz_zip_set_version_madeby(void *handle, uint16_t version_madeby) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL) + return MZ_PARAM_ERROR; + zip->version_madeby = version_madeby; + return MZ_OK; +} + +int32_t mz_zip_set_recover(void *handle, uint8_t recover) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL) + return MZ_PARAM_ERROR; + zip->recover = recover; + return MZ_OK; +} + +int32_t mz_zip_set_data_descriptor(void *handle, uint8_t data_descriptor) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL) + return MZ_PARAM_ERROR; + zip->data_descriptor = data_descriptor; + return MZ_OK; +} + +int32_t mz_zip_get_stream(void *handle, void **stream) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || stream == NULL) + return MZ_PARAM_ERROR; + *stream = zip->stream; + if (*stream == NULL) + return MZ_EXIST_ERROR; + return MZ_OK; +} + +int32_t mz_zip_set_cd_stream(void *handle, int64_t cd_start_pos, void *cd_stream) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || cd_stream == NULL) + return MZ_PARAM_ERROR; + zip->cd_offset = 0; + zip->cd_stream = cd_stream; + zip->cd_start_pos = cd_start_pos; + return MZ_OK; +} + +int32_t mz_zip_get_cd_mem_stream(void *handle, void **cd_mem_stream) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || cd_mem_stream == NULL) + return MZ_PARAM_ERROR; + *cd_mem_stream = zip->cd_mem_stream; + if (*cd_mem_stream == NULL) + return MZ_EXIST_ERROR; + return MZ_OK; +} + +int32_t mz_zip_set_number_entry(void *handle, uint64_t number_entry) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL) + return MZ_PARAM_ERROR; + zip->number_entry = number_entry; + return MZ_OK; +} + +int32_t mz_zip_get_number_entry(void *handle, uint64_t *number_entry) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || number_entry == NULL) + return MZ_PARAM_ERROR; + *number_entry = zip->number_entry; + return MZ_OK; +} + +int32_t mz_zip_set_disk_number_with_cd(void *handle, uint32_t disk_number_with_cd) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL) + return MZ_PARAM_ERROR; + zip->disk_number_with_cd = disk_number_with_cd; + return MZ_OK; +} + +int32_t mz_zip_get_disk_number_with_cd(void *handle, uint32_t *disk_number_with_cd) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || disk_number_with_cd == NULL) + return MZ_PARAM_ERROR; + *disk_number_with_cd = zip->disk_number_with_cd; + return MZ_OK; +} + +static int32_t mz_zip_entry_close_int(void *handle) { + mz_zip *zip = (mz_zip *)handle; + + if (zip->crypt_stream != NULL) + mz_stream_delete(&zip->crypt_stream); + zip->crypt_stream = NULL; + if (zip->compress_stream != NULL) + mz_stream_delete(&zip->compress_stream); + zip->compress_stream = NULL; + + zip->entry_opened = 0; + + return MZ_OK; +} + +static int32_t mz_zip_entry_open_int(void *handle, uint8_t raw, int16_t compress_level, const char *password) { + mz_zip *zip = (mz_zip *)handle; + int64_t max_total_in = 0; + int64_t header_size = 0; + int64_t footer_size = 0; + int32_t err = MZ_OK; + uint8_t use_crypt = 0; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + 
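+    /* The switch below only rejects compression methods that were not compiled in or are
+       unknown; the actual entry I/O set up further down in this function is layered as
+       zip->stream -> crypt_stream -> compress_stream. */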
switch (zip->file_info.compression_method) { + case MZ_COMPRESS_METHOD_STORE: + case MZ_COMPRESS_METHOD_DEFLATE: +#ifdef HAVE_BZIP2 + case MZ_COMPRESS_METHOD_BZIP2: +#endif +#ifdef HAVE_LZMA + case MZ_COMPRESS_METHOD_LZMA: +#endif +#if defined(HAVE_LZMA) || defined(HAVE_LIBCOMP) + case MZ_COMPRESS_METHOD_XZ: +#endif +#ifdef HAVE_ZSTD + case MZ_COMPRESS_METHOD_ZSTD: +#endif + err = MZ_OK; + break; + default: + return MZ_SUPPORT_ERROR; + } + +#ifndef HAVE_WZAES + if (zip->file_info.aes_version) + return MZ_SUPPORT_ERROR; +#endif + + zip->entry_raw = raw; + + if ((zip->file_info.flag & MZ_ZIP_FLAG_ENCRYPTED) && (password != NULL)) { + if (zip->open_mode & MZ_OPEN_MODE_WRITE) { + /* Encrypt only when we are not trying to write raw and password is supplied. */ + if (!zip->entry_raw) + use_crypt = 1; + } else if (zip->open_mode & MZ_OPEN_MODE_READ) { + /* Decrypt only when password is supplied. Don't error when password */ + /* is not supplied as we may want to read the raw encrypted data. */ + use_crypt = 1; + } + } + + if ((err == MZ_OK) && (use_crypt)) { +#ifdef HAVE_WZAES + if (zip->file_info.aes_version) { + mz_stream_wzaes_create(&zip->crypt_stream); + mz_stream_wzaes_set_password(zip->crypt_stream, password); + mz_stream_wzaes_set_encryption_mode(zip->crypt_stream, zip->file_info.aes_encryption_mode); + } else +#endif + { +#ifdef HAVE_PKCRYPT + uint8_t verify1 = (uint8_t)((zip->file_info.pk_verify >> 8) & 0xff); + uint8_t verify2 = (uint8_t)((zip->file_info.pk_verify) & 0xff); + + mz_stream_pkcrypt_create(&zip->crypt_stream); + mz_stream_pkcrypt_set_password(zip->crypt_stream, password); + mz_stream_pkcrypt_set_verify(zip->crypt_stream, verify1, verify2); +#endif + } + } + + if (err == MZ_OK) { + if (zip->crypt_stream == NULL) + mz_stream_raw_create(&zip->crypt_stream); + + mz_stream_set_base(zip->crypt_stream, zip->stream); + + err = mz_stream_open(zip->crypt_stream, NULL, zip->open_mode); + } + + if (err == MZ_OK) { + if (zip->entry_raw || zip->file_info.compression_method == MZ_COMPRESS_METHOD_STORE) + mz_stream_raw_create(&zip->compress_stream); +#ifdef HAVE_ZLIB + else if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_DEFLATE) + mz_stream_zlib_create(&zip->compress_stream); +#endif +#ifdef HAVE_BZIP2 + else if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_BZIP2) + mz_stream_bzip_create(&zip->compress_stream); +#endif +#ifdef HAVE_LIBCOMP + else if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_DEFLATE || + zip->file_info.compression_method == MZ_COMPRESS_METHOD_XZ) { + mz_stream_libcomp_create(&zip->compress_stream); + mz_stream_set_prop_int64(zip->compress_stream, MZ_STREAM_PROP_COMPRESS_METHOD, + zip->file_info.compression_method); + } +#endif +#ifdef HAVE_LZMA + else if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_LZMA || + zip->file_info.compression_method == MZ_COMPRESS_METHOD_XZ) { + mz_stream_lzma_create(&zip->compress_stream); + mz_stream_set_prop_int64(zip->compress_stream, MZ_STREAM_PROP_COMPRESS_METHOD, + zip->file_info.compression_method); + } +#endif +#ifdef HAVE_ZSTD + else if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_ZSTD) + mz_stream_zstd_create(&zip->compress_stream); +#endif + else + err = MZ_PARAM_ERROR; + } + + if (err == MZ_OK) { + if (zip->open_mode & MZ_OPEN_MODE_WRITE) { + mz_stream_set_prop_int64(zip->compress_stream, MZ_STREAM_PROP_COMPRESS_LEVEL, compress_level); + } else { + int32_t set_end_of_stream = 0; + +#ifndef HAVE_LIBCOMP + if (zip->entry_raw || + zip->file_info.compression_method == 
MZ_COMPRESS_METHOD_STORE || + zip->file_info.flag & MZ_ZIP_FLAG_ENCRYPTED) +#endif + { + max_total_in = zip->file_info.compressed_size; + mz_stream_set_prop_int64(zip->crypt_stream, MZ_STREAM_PROP_TOTAL_IN_MAX, max_total_in); + + if (mz_stream_get_prop_int64(zip->crypt_stream, MZ_STREAM_PROP_HEADER_SIZE, &header_size) == MZ_OK) + max_total_in -= header_size; + if (mz_stream_get_prop_int64(zip->crypt_stream, MZ_STREAM_PROP_FOOTER_SIZE, &footer_size) == MZ_OK) + max_total_in -= footer_size; + + mz_stream_set_prop_int64(zip->compress_stream, MZ_STREAM_PROP_TOTAL_IN_MAX, max_total_in); + } + + switch (zip->file_info.compression_method) { + case MZ_COMPRESS_METHOD_LZMA: + case MZ_COMPRESS_METHOD_XZ: + set_end_of_stream = (zip->file_info.flag & MZ_ZIP_FLAG_LZMA_EOS_MARKER); + break; + case MZ_COMPRESS_METHOD_ZSTD: + set_end_of_stream = 1; + break; + } + + if (set_end_of_stream) { + mz_stream_set_prop_int64(zip->compress_stream, MZ_STREAM_PROP_TOTAL_IN_MAX, zip->file_info.compressed_size); + mz_stream_set_prop_int64(zip->compress_stream, MZ_STREAM_PROP_TOTAL_OUT_MAX, zip->file_info.uncompressed_size); + } + } + + mz_stream_set_base(zip->compress_stream, zip->crypt_stream); + + err = mz_stream_open(zip->compress_stream, NULL, zip->open_mode); + } + + if (err == MZ_OK) { + zip->entry_opened = 1; + zip->entry_crc32 = 0; + } else { + mz_zip_entry_close_int(handle); + } + + return err; +} + +int32_t mz_zip_entry_is_open(void *handle) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL) + return MZ_PARAM_ERROR; + if (zip->entry_opened == 0) + return MZ_EXIST_ERROR; + return MZ_OK; +} + +int32_t mz_zip_entry_read_open(void *handle, uint8_t raw, const char *password) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + int32_t err_shift = MZ_OK; + +#if defined(MZ_ZIP_NO_ENCRYPTION) + if (password != NULL) + return MZ_SUPPORT_ERROR; +#endif + if (zip == NULL) + return MZ_PARAM_ERROR; + if ((zip->open_mode & MZ_OPEN_MODE_READ) == 0) + return MZ_PARAM_ERROR; + if (zip->entry_scanned == 0) + return MZ_PARAM_ERROR; + + mz_zip_print("Zip - Entry - Read open (raw %" PRId32 ")\n", raw); + + err = mz_zip_entry_seek_local_header(handle); + if (err == MZ_OK) + err = mz_zip_entry_read_header(zip->stream, 1, &zip->local_file_info, zip->local_file_info_stream); + + if (err == MZ_FORMAT_ERROR && zip->disk_offset_shift > 0) { + /* Perhaps we didn't compensated correctly for incorrect cd offset */ + err_shift = mz_stream_seek(zip->stream, zip->file_info.disk_offset, MZ_SEEK_SET); + if (err_shift == MZ_OK) + err_shift = mz_zip_entry_read_header(zip->stream, 1, &zip->local_file_info, zip->local_file_info_stream); + if (err_shift == MZ_OK) { + zip->disk_offset_shift = 0; + err = err_shift; + } + } + +#ifdef MZ_ZIP_NO_DECOMPRESSION + if (!raw && zip->file_info.compression_method != MZ_COMPRESS_METHOD_STORE) + err = MZ_SUPPORT_ERROR; +#endif + if (err == MZ_OK) + err = mz_zip_entry_open_int(handle, raw, 0, password); + + return err; +} + +int32_t mz_zip_entry_write_open(void *handle, const mz_zip_file *file_info, int16_t compress_level, uint8_t raw, const char *password) { + mz_zip *zip = (mz_zip *)handle; + int64_t filename_pos = -1; + int64_t extrafield_pos = 0; + int64_t comment_pos = 0; + int64_t linkname_pos = 0; + int64_t disk_number = 0; + uint8_t is_dir = 0; + int32_t err = MZ_OK; + +#if defined(MZ_ZIP_NO_ENCRYPTION) + if (password != NULL) + return MZ_SUPPORT_ERROR; +#endif + if (zip == NULL || file_info == NULL || file_info->filename == NULL) + return MZ_PARAM_ERROR; + + if (mz_zip_entry_is_open(handle) == 
MZ_OK) { + err = mz_zip_entry_close(handle); + if (err != MZ_OK) + return err; + } + + memcpy(&zip->file_info, file_info, sizeof(mz_zip_file)); + + mz_zip_print("Zip - Entry - Write open - %s (level %" PRId16 " raw %" PRId8 ")\n", + zip->file_info.filename, compress_level, raw); + + mz_stream_seek(zip->file_info_stream, 0, MZ_SEEK_SET); + mz_stream_write(zip->file_info_stream, file_info, sizeof(mz_zip_file)); + + /* Copy filename, extrafield, and comment internally */ + filename_pos = mz_stream_tell(zip->file_info_stream); + if (file_info->filename != NULL) + mz_stream_write(zip->file_info_stream, file_info->filename, (int32_t)strlen(file_info->filename)); + mz_stream_write_uint8(zip->file_info_stream, 0); + + extrafield_pos = mz_stream_tell(zip->file_info_stream); + if (file_info->extrafield != NULL) + mz_stream_write(zip->file_info_stream, file_info->extrafield, file_info->extrafield_size); + mz_stream_write_uint8(zip->file_info_stream, 0); + + comment_pos = mz_stream_tell(zip->file_info_stream); + if (file_info->comment != NULL) + mz_stream_write(zip->file_info_stream, file_info->comment, file_info->comment_size); + mz_stream_write_uint8(zip->file_info_stream, 0); + + linkname_pos = mz_stream_tell(zip->file_info_stream); + if (file_info->linkname != NULL) + mz_stream_write(zip->file_info_stream, file_info->linkname, (int32_t)strlen(file_info->linkname)); + mz_stream_write_uint8(zip->file_info_stream, 0); + + mz_stream_mem_get_buffer_at(zip->file_info_stream, filename_pos, (const void **)&zip->file_info.filename); + mz_stream_mem_get_buffer_at(zip->file_info_stream, extrafield_pos, (const void **)&zip->file_info.extrafield); + mz_stream_mem_get_buffer_at(zip->file_info_stream, comment_pos, (const void **)&zip->file_info.comment); + mz_stream_mem_get_buffer_at(zip->file_info_stream, linkname_pos, (const void **)&zip->file_info.linkname); + + if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_DEFLATE) { + if ((compress_level == 8) || (compress_level == 9)) + zip->file_info.flag |= MZ_ZIP_FLAG_DEFLATE_MAX; + if (compress_level == 2) + zip->file_info.flag |= MZ_ZIP_FLAG_DEFLATE_FAST; + if (compress_level == 1) + zip->file_info.flag |= MZ_ZIP_FLAG_DEFLATE_SUPER_FAST; + } +#if defined(HAVE_LZMA) || defined(HAVE_LIBCOMP) + else if (zip->file_info.compression_method == MZ_COMPRESS_METHOD_LZMA || + zip->file_info.compression_method == MZ_COMPRESS_METHOD_XZ) + zip->file_info.flag |= MZ_ZIP_FLAG_LZMA_EOS_MARKER; +#endif + + if (mz_zip_attrib_is_dir(zip->file_info.external_fa, zip->file_info.version_madeby) == MZ_OK) + is_dir = 1; + + if (!is_dir) { + if (zip->data_descriptor) + zip->file_info.flag |= MZ_ZIP_FLAG_DATA_DESCRIPTOR; + if (password != NULL) + zip->file_info.flag |= MZ_ZIP_FLAG_ENCRYPTED; + } + + mz_stream_get_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, &disk_number); + zip->file_info.disk_number = (uint32_t)disk_number; + zip->file_info.disk_offset = mz_stream_tell(zip->stream); + + if (zip->file_info.flag & MZ_ZIP_FLAG_ENCRYPTED) { +#ifdef HAVE_PKCRYPT + /* Pre-calculated CRC value is required for PKWARE traditional encryption */ + uint32_t dos_date = mz_zip_time_t_to_dos_date(zip->file_info.modified_date); + zip->file_info.pk_verify = mz_zip_get_pk_verify(dos_date, zip->file_info.crc, zip->file_info.flag); +#endif +#ifdef HAVE_WZAES + if (zip->file_info.aes_version && zip->file_info.aes_encryption_mode == 0) + zip->file_info.aes_encryption_mode = MZ_AES_ENCRYPTION_MODE_256; +#endif + } + + zip->file_info.crc = 0; + zip->file_info.compressed_size = 0; + + if 
((compress_level == 0) || (is_dir)) + zip->file_info.compression_method = MZ_COMPRESS_METHOD_STORE; + +#ifdef MZ_ZIP_NO_COMPRESSION + if (zip->file_info.compression_method != MZ_COMPRESS_METHOD_STORE) + err = MZ_SUPPORT_ERROR; +#endif + if (err == MZ_OK) + err = mz_zip_entry_write_header(zip->stream, 1, &zip->file_info); + if (err == MZ_OK) + err = mz_zip_entry_open_int(handle, raw, compress_level, password); + + return err; +} + +int32_t mz_zip_entry_read(void *handle, void *buf, int32_t len) { + mz_zip *zip = (mz_zip *)handle; + int32_t read = 0; + + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + if (UINT_MAX == UINT16_MAX && len > UINT16_MAX) /* zlib limitation */ + return MZ_PARAM_ERROR; + if (len == 0) + return MZ_PARAM_ERROR; + + if (zip->file_info.compressed_size == 0) + return 0; + + /* Read entire entry even if uncompressed_size = 0, otherwise */ + /* aes encryption validation will fail if compressed_size > 0 */ + read = mz_stream_read(zip->compress_stream, buf, len); + if (read > 0) + zip->entry_crc32 = mz_crypt_crc32_update(zip->entry_crc32, buf, read); + + mz_zip_print("Zip - Entry - Read - %" PRId32 " (max %" PRId32 ")\n", read, len); + + return read; +} + +int32_t mz_zip_entry_write(void *handle, const void *buf, int32_t len) { + mz_zip *zip = (mz_zip *)handle; + int32_t written = 0; + + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + written = mz_stream_write(zip->compress_stream, buf, len); + if (written > 0) + zip->entry_crc32 = mz_crypt_crc32_update(zip->entry_crc32, buf, written); + + mz_zip_print("Zip - Entry - Write - %" PRId32 " (max %" PRId32 ")\n", written, len); + + return written; +} + +int32_t mz_zip_entry_read_close(void *handle, uint32_t *crc32, int64_t *compressed_size, + int64_t *uncompressed_size) { + mz_zip *zip = (mz_zip *)handle; + int64_t total_in = 0; + int32_t err = MZ_OK; + uint8_t zip64 = 0; + + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + + mz_stream_close(zip->compress_stream); + + mz_zip_print("Zip - Entry - Read Close\n"); + + if (crc32 != NULL) + *crc32 = zip->file_info.crc; + if (compressed_size != NULL) + *compressed_size = zip->file_info.compressed_size; + if (uncompressed_size != NULL) + *uncompressed_size = zip->file_info.uncompressed_size; + + mz_stream_get_prop_int64(zip->compress_stream, MZ_STREAM_PROP_TOTAL_IN, &total_in); + + if ((zip->file_info.flag & MZ_ZIP_FLAG_DATA_DESCRIPTOR) && + ((zip->file_info.flag & MZ_ZIP_FLAG_MASK_LOCAL_INFO) == 0) && + (crc32 != NULL || compressed_size != NULL || uncompressed_size != NULL)) { + /* Check to see if data descriptor is zip64 bit format or not */ + if (mz_zip_extrafield_contains(zip->local_file_info.extrafield, + zip->local_file_info.extrafield_size, MZ_ZIP_EXTENSION_ZIP64, NULL) == MZ_OK) + zip64 = 1; + + err = mz_zip_entry_seek_local_header(handle); + + /* Seek to end of compressed stream since we might have over-read during compression */ + if (err == MZ_OK) + err = mz_stream_seek(zip->stream, MZ_ZIP_SIZE_LD_ITEM + + (int64_t)zip->local_file_info.filename_size + + (int64_t)zip->local_file_info.extrafield_size + + total_in, MZ_SEEK_CUR); + + /* Read data descriptor */ + if (err == MZ_OK) + err = mz_zip_entry_read_descriptor(zip->stream, zip64, + crc32, compressed_size, uncompressed_size); + } + + /* If entire entry was not read verification will fail */ + if ((err == MZ_OK) && (total_in > 0) && (!zip->entry_raw)) { +#ifdef HAVE_WZAES + /* AES zip version AE-1 will expect a valid 
crc as well */ + if (zip->file_info.aes_version <= 0x0001) +#endif + { + if (zip->entry_crc32 != zip->file_info.crc) { + mz_zip_print("Zip - Entry - Crc failed (actual 0x%08" PRIx32 " expected 0x%08" PRIx32 ")\n", + zip->entry_crc32, zip->file_info.crc); + + err = MZ_CRC_ERROR; + } + } + } + + mz_zip_entry_close_int(handle); + + return err; +} + +int32_t mz_zip_entry_write_close(void *handle, uint32_t crc32, int64_t compressed_size, + int64_t uncompressed_size) { + mz_zip *zip = (mz_zip *)handle; + int64_t end_disk_number = 0; + int32_t err = MZ_OK; + uint8_t zip64 = 0; + + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + + mz_stream_close(zip->compress_stream); + + if (!zip->entry_raw) + crc32 = zip->entry_crc32; + + mz_zip_print("Zip - Entry - Write Close (crc 0x%08" PRIx32 " cs %" PRId64 " ucs %" PRId64 ")\n", + crc32, compressed_size, uncompressed_size); + + /* If sizes are not set, then read them from the compression stream */ + if (compressed_size < 0) + mz_stream_get_prop_int64(zip->compress_stream, MZ_STREAM_PROP_TOTAL_OUT, &compressed_size); + if (uncompressed_size < 0) + mz_stream_get_prop_int64(zip->compress_stream, MZ_STREAM_PROP_TOTAL_IN, &uncompressed_size); + + if (zip->file_info.flag & MZ_ZIP_FLAG_ENCRYPTED) { + mz_stream_set_base(zip->crypt_stream, zip->stream); + err = mz_stream_close(zip->crypt_stream); + + mz_stream_get_prop_int64(zip->crypt_stream, MZ_STREAM_PROP_TOTAL_OUT, &compressed_size); + } + + mz_zip_entry_needs_zip64(&zip->file_info, 1, &zip64); + + if ((err == MZ_OK) && (zip->file_info.flag & MZ_ZIP_FLAG_DATA_DESCRIPTOR)) { + /* Determine if we need to write data descriptor in zip64 format, + if local extrafield was saved with zip64 extrafield */ + + if (zip->file_info.flag & MZ_ZIP_FLAG_MASK_LOCAL_INFO) + err = mz_zip_entry_write_descriptor(zip->stream, + zip64, 0, compressed_size, 0); + else + err = mz_zip_entry_write_descriptor(zip->stream, + zip64, crc32, compressed_size, uncompressed_size); + } + + /* Write file info to central directory */ + + mz_zip_print("Zip - Entry - Write cd (ucs %" PRId64 " cs %" PRId64 " crc 0x%08" PRIx32 ")\n", + uncompressed_size, compressed_size, crc32); + + zip->file_info.crc = crc32; + zip->file_info.compressed_size = compressed_size; + zip->file_info.uncompressed_size = uncompressed_size; + + if (err == MZ_OK) + err = mz_zip_entry_write_header(zip->cd_mem_stream, 0, &zip->file_info); + + /* Update local header with crc32 and sizes */ + if ((err == MZ_OK) && ((zip->file_info.flag & MZ_ZIP_FLAG_DATA_DESCRIPTOR) == 0) && + ((zip->file_info.flag & MZ_ZIP_FLAG_MASK_LOCAL_INFO) == 0)) { + /* Save the disk number and position we are to seek back after updating local header */ + int64_t end_pos = mz_stream_tell(zip->stream); + mz_stream_get_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, &end_disk_number); + + err = mz_zip_entry_seek_local_header(handle); + + if (err == MZ_OK) { + /* Seek to crc32 and sizes offset in local header */ + err = mz_stream_seek(zip->stream, MZ_ZIP_OFFSET_CRC_SIZES, MZ_SEEK_CUR); + } + + if (err == MZ_OK) + err = mz_zip_entry_write_crc_sizes(zip->stream, zip64, 0, &zip->file_info); + + /* Seek to and update zip64 extension sizes */ + if ((err == MZ_OK) && (zip64)) { + int64_t filename_size = zip->file_info.filename_size; + + if (filename_size == 0) + filename_size = strlen(zip->file_info.filename); + + /* Since we write zip64 extension first we know its offset */ + err = mz_stream_seek(zip->stream, 2 + 2 + filename_size + 4, MZ_SEEK_CUR); + + if (err == MZ_OK) + err 
= mz_stream_write_uint64(zip->stream, zip->file_info.uncompressed_size); + if (err == MZ_OK) + err = mz_stream_write_uint64(zip->stream, zip->file_info.compressed_size); + } + + mz_stream_set_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, end_disk_number); + mz_stream_seek(zip->stream, end_pos, MZ_SEEK_SET); + } + + zip->number_entry += 1; + + mz_zip_entry_close_int(handle); + + return err; +} + +int32_t mz_zip_entry_seek_local_header(void *handle) { + mz_zip *zip = (mz_zip *)handle; + int64_t disk_size = 0; + uint32_t disk_number = zip->file_info.disk_number; + + if (disk_number == zip->disk_number_with_cd) { + mz_stream_get_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_SIZE, &disk_size); + if ((disk_size == 0) || ((zip->open_mode & MZ_OPEN_MODE_WRITE) == 0)) + disk_number = (uint32_t)-1; + } + + mz_stream_set_prop_int64(zip->stream, MZ_STREAM_PROP_DISK_NUMBER, disk_number); + + mz_zip_print("Zip - Entry - Seek local (disk %" PRId32 " offset %" PRId64 ")\n", + disk_number, zip->file_info.disk_offset); + + /* Guard against seek overflows */ + if ((zip->disk_offset_shift > 0) && + (zip->file_info.disk_offset > (INT64_MAX - zip->disk_offset_shift))) + return MZ_FORMAT_ERROR; + + return mz_stream_seek(zip->stream, zip->file_info.disk_offset + zip->disk_offset_shift, MZ_SEEK_SET); +} + +int32_t mz_zip_entry_close(void *handle) { + return mz_zip_entry_close_raw(handle, UINT64_MAX, 0); +} + +int32_t mz_zip_entry_close_raw(void *handle, int64_t uncompressed_size, uint32_t crc32) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + + if (zip->open_mode & MZ_OPEN_MODE_WRITE) + err = mz_zip_entry_write_close(handle, crc32, UINT64_MAX, uncompressed_size); + else + err = mz_zip_entry_read_close(handle, NULL, NULL, NULL); + + return err; +} + +int32_t mz_zip_entry_is_dir(void *handle) { + mz_zip *zip = (mz_zip *)handle; + int32_t filename_length = 0; + + if (zip == NULL) + return MZ_PARAM_ERROR; + if (zip->entry_scanned == 0) + return MZ_PARAM_ERROR; + if (mz_zip_attrib_is_dir(zip->file_info.external_fa, zip->file_info.version_madeby) == MZ_OK) + return MZ_OK; + + filename_length = (int32_t)strlen(zip->file_info.filename); + if (filename_length > 0) { + if ((zip->file_info.filename[filename_length - 1] == '/') || + (zip->file_info.filename[filename_length - 1] == '\\')) + return MZ_OK; + } + return MZ_EXIST_ERROR; +} + +int32_t mz_zip_entry_is_symlink(void *handle) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL) + return MZ_PARAM_ERROR; + if (zip->entry_scanned == 0) + return MZ_PARAM_ERROR; + if (mz_zip_attrib_is_symlink(zip->file_info.external_fa, zip->file_info.version_madeby) != MZ_OK) + return MZ_EXIST_ERROR; + if (zip->file_info.linkname == NULL || *zip->file_info.linkname == 0) + return MZ_EXIST_ERROR; + + return MZ_OK; +} + +int32_t mz_zip_entry_get_info(void *handle, mz_zip_file **file_info) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + if ((zip->open_mode & MZ_OPEN_MODE_WRITE) == 0) { + if (!zip->entry_scanned) + return MZ_PARAM_ERROR; + } + + *file_info = &zip->file_info; + return MZ_OK; +} + +int32_t mz_zip_entry_get_local_info(void *handle, mz_zip_file **local_file_info) { + mz_zip *zip = (mz_zip *)handle; + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + *local_file_info = &zip->local_file_info; + return MZ_OK; +} + +int32_t mz_zip_entry_set_extrafield(void *handle, const uint8_t *extrafield, uint16_t 
extrafield_size) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL || mz_zip_entry_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + + zip->file_info.extrafield = extrafield; + zip->file_info.extrafield_size = extrafield_size; + return MZ_OK; +} + +static int32_t mz_zip_goto_next_entry_int(void *handle) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + zip->entry_scanned = 0; + + mz_stream_set_prop_int64(zip->cd_stream, MZ_STREAM_PROP_DISK_NUMBER, -1); + + err = mz_stream_seek(zip->cd_stream, zip->cd_current_pos, MZ_SEEK_SET); + if (err == MZ_OK) + err = mz_zip_entry_read_header(zip->cd_stream, 0, &zip->file_info, zip->file_info_stream); + if (err == MZ_OK) + zip->entry_scanned = 1; + return err; +} + +int64_t mz_zip_get_entry(void *handle) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + return zip->cd_current_pos; +} + +int32_t mz_zip_goto_entry(void *handle, int64_t cd_pos) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + if (cd_pos < zip->cd_start_pos || cd_pos > zip->cd_start_pos + zip->cd_size) + return MZ_PARAM_ERROR; + + zip->cd_current_pos = cd_pos; + + return mz_zip_goto_next_entry_int(handle); +} + +int32_t mz_zip_goto_first_entry(void *handle) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + zip->cd_current_pos = zip->cd_start_pos; + + return mz_zip_goto_next_entry_int(handle); +} + +int32_t mz_zip_goto_next_entry(void *handle) { + mz_zip *zip = (mz_zip *)handle; + + if (zip == NULL) + return MZ_PARAM_ERROR; + + zip->cd_current_pos += (int64_t)MZ_ZIP_SIZE_CD_ITEM + zip->file_info.filename_size + + zip->file_info.extrafield_size + zip->file_info.comment_size; + + return mz_zip_goto_next_entry_int(handle); +} + +int32_t mz_zip_locate_entry(void *handle, const char *filename, uint8_t ignore_case) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + int32_t result = 0; + + if (zip == NULL || filename == NULL) + return MZ_PARAM_ERROR; + + /* If we are already on the current entry, no need to search */ + if ((zip->entry_scanned) && (zip->file_info.filename != NULL)) { + result = mz_zip_path_compare(zip->file_info.filename, filename, ignore_case); + if (result == 0) + return MZ_OK; + } + + /* Search all entries starting at the first */ + err = mz_zip_goto_first_entry(handle); + while (err == MZ_OK) { + result = mz_zip_path_compare(zip->file_info.filename, filename, ignore_case); + if (result == 0) + return MZ_OK; + + err = mz_zip_goto_next_entry(handle); + } + + return err; +} + +int32_t mz_zip_locate_first_entry(void *handle, void *userdata, mz_zip_locate_entry_cb cb) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + int32_t result = 0; + + /* Search first entry looking for match */ + err = mz_zip_goto_first_entry(handle); + if (err != MZ_OK) + return err; + + result = cb(handle, userdata, &zip->file_info); + if (result == 0) + return MZ_OK; + + return mz_zip_locate_next_entry(handle, userdata, cb); +} + +int32_t mz_zip_locate_next_entry(void *handle, void *userdata, mz_zip_locate_entry_cb cb) { + mz_zip *zip = (mz_zip *)handle; + int32_t err = MZ_OK; + int32_t result = 0; + + /* Search next entries looking for match */ + err = mz_zip_goto_next_entry(handle); + while (err == MZ_OK) { + result = cb(handle, userdata, &zip->file_info); + if (result == 0) + return MZ_OK; + + err = mz_zip_goto_next_entry(handle); + } + + return err; +} + 
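The goto/locate and entry calls above form the complete read path of this API: open a stream, let mz_zip_open() read the central directory, locate an entry, then stream its decompressed payload. A minimal caller sketch follows; it is illustrative only (not part of the vendored sources), it assumes minizip-ng's OS file-stream helpers from mz_strm_os.h (which are outside this hunk), and error handling is abbreviated.

    /* Illustrative read-path sketch, not part of the patch.
       Assumes mz_strm_os.h from minizip-ng for the OS-backed stream. */
    #include <stdint.h>
    #include "mz.h"
    #include "mz_strm_os.h"
    #include "mz_zip.h"

    static int32_t read_one_entry(const char *zip_path, const char *entry_name) {
        void *stream = NULL;
        void *zip = NULL;
        char buf[4096];
        int32_t read = 0;
        int32_t err;

        mz_stream_os_create(&stream);
        err = mz_stream_os_open(stream, zip_path, MZ_OPEN_MODE_READ);
        if (err == MZ_OK) {
            mz_zip_create(&zip);
            err = mz_zip_open(zip, stream, MZ_OPEN_MODE_READ); /* reads the central directory */
        }
        if (err == MZ_OK)
            err = mz_zip_locate_entry(zip, entry_name, 1 /* ignore_case */);
        if (err == MZ_OK)
            err = mz_zip_entry_read_open(zip, 0 /* raw */, NULL /* password */);
        while (err == MZ_OK && (read = mz_zip_entry_read(zip, buf, (int32_t)sizeof(buf))) > 0) {
            /* consume 'read' bytes of decompressed data here */
        }
        if (read < 0)
            err = read; /* negative return values from mz_zip_entry_read are errors */
        mz_zip_entry_close(zip); /* the read side verifies the CRC recorded in the central directory */
        mz_zip_close(zip);
        mz_zip_delete(&zip);
        mz_stream_os_close(stream);
        mz_stream_os_delete(&stream);
        return err;
    }

Note that mz_zip_entry_read() returns 0 both at end of data and for entries whose stored compressed size is zero, so the loop above terminates without special-casing empty files.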
+/***************************************************************************/ + +int32_t mz_zip_attrib_is_dir(uint32_t attrib, int32_t version_madeby) { + uint32_t posix_attrib = 0; + uint8_t system = MZ_HOST_SYSTEM(version_madeby); + int32_t err = MZ_OK; + + err = mz_zip_attrib_convert(system, attrib, MZ_HOST_SYSTEM_UNIX, &posix_attrib); + if (err == MZ_OK) { + if ((posix_attrib & 0170000) == 0040000) /* S_ISDIR */ + return MZ_OK; + } + + return MZ_EXIST_ERROR; +} + +int32_t mz_zip_attrib_is_symlink(uint32_t attrib, int32_t version_madeby) { + uint32_t posix_attrib = 0; + uint8_t system = MZ_HOST_SYSTEM(version_madeby); + int32_t err = MZ_OK; + + err = mz_zip_attrib_convert(system, attrib, MZ_HOST_SYSTEM_UNIX, &posix_attrib); + if (err == MZ_OK) { + if ((posix_attrib & 0170000) == 0120000) /* S_ISLNK */ + return MZ_OK; + } + + return MZ_EXIST_ERROR; +} + +int32_t mz_zip_attrib_convert(uint8_t src_sys, uint32_t src_attrib, uint8_t target_sys, uint32_t *target_attrib) { + if (target_attrib == NULL) + return MZ_PARAM_ERROR; + + *target_attrib = 0; + + if ((src_sys == MZ_HOST_SYSTEM_MSDOS) || (src_sys == MZ_HOST_SYSTEM_WINDOWS_NTFS)) { + if ((target_sys == MZ_HOST_SYSTEM_MSDOS) || (target_sys == MZ_HOST_SYSTEM_WINDOWS_NTFS)) { + *target_attrib = src_attrib; + return MZ_OK; + } + if ((target_sys == MZ_HOST_SYSTEM_UNIX) || (target_sys == MZ_HOST_SYSTEM_OSX_DARWIN) || (target_sys == MZ_HOST_SYSTEM_RISCOS)) + return mz_zip_attrib_win32_to_posix(src_attrib, target_attrib); + } else if ((src_sys == MZ_HOST_SYSTEM_UNIX) || (src_sys == MZ_HOST_SYSTEM_OSX_DARWIN) || (src_sys == MZ_HOST_SYSTEM_RISCOS)) { + if ((target_sys == MZ_HOST_SYSTEM_UNIX) || (target_sys == MZ_HOST_SYSTEM_OSX_DARWIN) || (target_sys == MZ_HOST_SYSTEM_RISCOS)) { + /* If high bytes are set, it contains unix specific attributes */ + if ((src_attrib >> 16) != 0) + src_attrib >>= 16; + + *target_attrib = src_attrib; + return MZ_OK; + } + if ((target_sys == MZ_HOST_SYSTEM_MSDOS) || (target_sys == MZ_HOST_SYSTEM_WINDOWS_NTFS)) + return mz_zip_attrib_posix_to_win32(src_attrib, target_attrib); + } + + return MZ_SUPPORT_ERROR; +} + +int32_t mz_zip_attrib_posix_to_win32(uint32_t posix_attrib, uint32_t *win32_attrib) { + if (win32_attrib == NULL) + return MZ_PARAM_ERROR; + + *win32_attrib = 0; + + /* S_IWUSR | S_IWGRP | S_IWOTH | S_IXUSR | S_IXGRP | S_IXOTH */ + if ((posix_attrib & 0000333) == 0 && (posix_attrib & 0000444) != 0) + *win32_attrib |= 0x01; /* FILE_ATTRIBUTE_READONLY */ + /* S_IFLNK */ + if ((posix_attrib & 0170000) == 0120000) + *win32_attrib |= 0x400; /* FILE_ATTRIBUTE_REPARSE_POINT */ + /* S_IFDIR */ + else if ((posix_attrib & 0170000) == 0040000) + *win32_attrib |= 0x10; /* FILE_ATTRIBUTE_DIRECTORY */ + /* S_IFREG */ + else + *win32_attrib |= 0x80; /* FILE_ATTRIBUTE_NORMAL */ + + return MZ_OK; +} + +int32_t mz_zip_attrib_win32_to_posix(uint32_t win32_attrib, uint32_t *posix_attrib) { + if (posix_attrib == NULL) + return MZ_PARAM_ERROR; + + *posix_attrib = 0000444; /* S_IRUSR | S_IRGRP | S_IROTH */ + /* FILE_ATTRIBUTE_READONLY */ + if ((win32_attrib & 0x01) == 0) + *posix_attrib |= 0000222; /* S_IWUSR | S_IWGRP | S_IWOTH */ + /* FILE_ATTRIBUTE_REPARSE_POINT */ + if ((win32_attrib & 0x400) == 0x400) + *posix_attrib |= 0120000; /* S_IFLNK */ + /* FILE_ATTRIBUTE_DIRECTORY */ + else if ((win32_attrib & 0x10) == 0x10) + *posix_attrib |= 0040111; /* S_IFDIR | S_IXUSR | S_IXGRP | S_IXOTH */ + else + *posix_attrib |= 0100000; /* S_IFREG */ + + return MZ_OK; +} + 
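The attribute helpers above bridge the host-system conventions recorded in each entry: mz_zip_attrib_is_dir() and mz_zip_attrib_is_symlink() pick the creating system out of version_madeby via MZ_HOST_SYSTEM() and hand it to mz_zip_attrib_convert(), and unix-made entries carry their st_mode bits in the upper 16 bits of external_fa, which the converter shifts down before testing. A small illustrative sketch of the mapping is below; it assumes the MZ_HOST_SYSTEM_UNIX constant from mz.h and the standard zip layout that stores the creating system in the upper byte of version_madeby.

    /* Illustrative only -- exercises the conversion helpers above.
       MZ_HOST_SYSTEM_UNIX and the version_madeby layout are assumptions
       taken from mz.h / the zip appnote, not from this hunk. */
    #include <assert.h>
    #include <stdint.h>
    #include "mz.h"
    #include "mz_zip.h"

    static void attrib_conversion_examples(void) {
        uint32_t win32_attrib = 0;
        uint32_t posix_attrib = 0;

        /* A unix-made directory entry stores drwxr-xr-x (0040755) in the upper
           16 bits of external_fa; mz_zip_attrib_is_dir() recognizes it. */
        assert(mz_zip_attrib_is_dir(0040755u << 16, MZ_HOST_SYSTEM_UNIX << 8) == MZ_OK);

        /* POSIX directory bits map to FILE_ATTRIBUTE_DIRECTORY (0x10). */
        mz_zip_attrib_posix_to_win32(0040755, &win32_attrib);
        assert((win32_attrib & 0x10) == 0x10);

        /* A read-only WIN32 file (FILE_ATTRIBUTE_READONLY) maps back to r--r--r-- plus S_IFREG. */
        mz_zip_attrib_win32_to_posix(0x01, &posix_attrib);
        assert(posix_attrib == 0100444);
    }

The read-only rule in mz_zip_attrib_posix_to_win32() only fires when no write or execute bit is set, which is why 0040755 above maps to plain FILE_ATTRIBUTE_DIRECTORY rather than READONLY | DIRECTORY.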
+/***************************************************************************/
+
+int32_t mz_zip_extrafield_find(void *stream, uint16_t type, int32_t max_seek, uint16_t *length) {
+    int32_t err = MZ_OK;
+    uint16_t field_type = 0;
+    uint16_t field_length = 0;
+
+
+    if (max_seek < 4)
+        return MZ_EXIST_ERROR;
+
+    do {
+        err = mz_stream_read_uint16(stream, &field_type);
+        if (err == MZ_OK)
+            err = mz_stream_read_uint16(stream, &field_length);
+        if (err != MZ_OK)
+            break;
+
+        if (type == field_type) {
+            if (length != NULL)
+                *length = field_length;
+            return MZ_OK;
+        }
+
+        max_seek -= field_length - 4;
+        if (max_seek < 0)
+            return MZ_EXIST_ERROR;
+
+        err = mz_stream_seek(stream, field_length, MZ_SEEK_CUR);
+    } while (err == MZ_OK);
+
+    return MZ_EXIST_ERROR;
+}
+
+int32_t mz_zip_extrafield_contains(const uint8_t *extrafield, int32_t extrafield_size,
+    uint16_t type, uint16_t *length) {
+    void *file_extra_stream = NULL;
+    int32_t err = MZ_OK;
+
+    if (extrafield == NULL || extrafield_size == 0)
+        return MZ_PARAM_ERROR;
+
+    mz_stream_mem_create(&file_extra_stream);
+    mz_stream_mem_set_buffer(file_extra_stream, (void *)extrafield, extrafield_size);
+
+    err = mz_zip_extrafield_find(file_extra_stream, type, extrafield_size, length);
+
+    mz_stream_mem_delete(&file_extra_stream);
+
+    return err;
+}
+
+int32_t mz_zip_extrafield_read(void *stream, uint16_t *type, uint16_t *length) {
+    int32_t err = MZ_OK;
+    if (type == NULL || length == NULL)
+        return MZ_PARAM_ERROR;
+    err = mz_stream_read_uint16(stream, type);
+    if (err == MZ_OK)
+        err = mz_stream_read_uint16(stream, length);
+    return err;
+}
+
+int32_t mz_zip_extrafield_write(void *stream, uint16_t type, uint16_t length) {
+    int32_t err = MZ_OK;
+    err = mz_stream_write_uint16(stream, type);
+    if (err == MZ_OK)
+        err = mz_stream_write_uint16(stream, length);
+    return err;
+}
+
+/***************************************************************************/
+
+static int32_t mz_zip_invalid_date(const struct tm *ptm) {
+#define datevalue_in_range(min, max, value) ((min) <= (value) && (value) <= (max))
+    return (!datevalue_in_range(0, 127 + 80, ptm->tm_year) || /* 1980-based year, allow 80 extra */
+            !datevalue_in_range(0, 11, ptm->tm_mon) ||
+            !datevalue_in_range(1, 31, ptm->tm_mday) ||
+            !datevalue_in_range(0, 23, ptm->tm_hour) ||
+            !datevalue_in_range(0, 59, ptm->tm_min) ||
+            !datevalue_in_range(0, 59, ptm->tm_sec));
+#undef datevalue_in_range
+}
+
+static void mz_zip_dosdate_to_raw_tm(uint64_t dos_date, struct tm *ptm) {
+    uint64_t date = (uint64_t)(dos_date >> 16);
+
+    ptm->tm_mday = (uint16_t)(date & 0x1f);
+    ptm->tm_mon = (uint16_t)(((date & 0x1E0) / 0x20) - 1);
+    ptm->tm_year = (uint16_t)(((date & 0x0FE00) / 0x0200) + 80);
+    ptm->tm_hour = (uint16_t)((dos_date & 0xF800) / 0x800);
+    ptm->tm_min = (uint16_t)((dos_date & 0x7E0) / 0x20);
+    ptm->tm_sec = (uint16_t)(2 * (dos_date & 0x1f));
+    ptm->tm_isdst = -1;
+}
+
+int32_t mz_zip_dosdate_to_tm(uint64_t dos_date, struct tm *ptm) {
+    if (ptm == NULL)
+        return MZ_PARAM_ERROR;
+
+    mz_zip_dosdate_to_raw_tm(dos_date, ptm);
+
+    if (mz_zip_invalid_date(ptm)) {
+        /* Invalid date stored, so don't return it */
+        memset(ptm, 0, sizeof(struct tm));
+        return MZ_FORMAT_ERROR;
+    }
+    return MZ_OK;
+}
+
+time_t mz_zip_dosdate_to_time_t(uint64_t dos_date) {
+    struct tm ptm;
+    mz_zip_dosdate_to_raw_tm(dos_date, &ptm);
+    return mktime(&ptm);
+}
+
+int32_t mz_zip_time_t_to_tm(time_t unix_time, struct tm *ptm) {
+    struct tm ltm;
+    if (ptm == NULL)
+        return MZ_PARAM_ERROR;
+    if (localtime_r(&unix_time, &ltm) == NULL) { /* Returns a 1900-based year */
+        /* Invalid date stored, so don't return it */
+        memset(ptm, 0, sizeof(struct tm));
+        return MZ_INTERNAL_ERROR;
+    }
+    memcpy(ptm, &ltm, sizeof(struct tm));
+    return MZ_OK;
+}
+
+uint32_t mz_zip_time_t_to_dos_date(time_t unix_time) {
+    struct tm ptm;
+    mz_zip_time_t_to_tm(unix_time, &ptm);
+    return mz_zip_tm_to_dosdate((const struct tm *)&ptm);
+}
+
+uint32_t mz_zip_tm_to_dosdate(const struct tm *ptm) {
+    struct tm fixed_tm;
+
+    /* Years supported: */
+
+    /* [00, 79] (assumed to be between 2000 and 2079) */
+    /* [80, 207] (assumed to be between 1980 and 2107, typical output of old */
+    /* software that does 'year-1900' to get a double digit year) */
+    /* [1980, 2107] (due to format limitations, only years 1980-2107 can be stored.) */
+
+    memcpy(&fixed_tm, ptm, sizeof(struct tm));
+    if (fixed_tm.tm_year >= 1980) /* range [1980, 2107] */
+        fixed_tm.tm_year -= 1980;
+    else if (fixed_tm.tm_year >= 80) /* range [80, 207] */
+        fixed_tm.tm_year -= 80;
+    else /* range [00, 79] */
+        fixed_tm.tm_year += 20;
+
+    if (mz_zip_invalid_date(&fixed_tm))
+        return 0;
+
+    return (((uint32_t)fixed_tm.tm_mday + (32 * ((uint32_t)fixed_tm.tm_mon + 1)) + (512 * (uint32_t)fixed_tm.tm_year)) << 16) |
+        (((uint32_t)fixed_tm.tm_sec / 2) + (32 * (uint32_t)fixed_tm.tm_min) + (2048 * (uint32_t)fixed_tm.tm_hour));
+}
+
+int32_t mz_zip_ntfs_to_unix_time(uint64_t ntfs_time, time_t *unix_time) {
+    *unix_time = (time_t)((ntfs_time - 116444736000000000LL) / 10000000);
+    return MZ_OK;
+}
+
+int32_t mz_zip_unix_to_ntfs_time(time_t unix_time, uint64_t *ntfs_time) {
+    *ntfs_time = ((uint64_t)unix_time * 10000000) + 116444736000000000LL;
+    return MZ_OK;
+}
+
+/***************************************************************************/
+
+int32_t mz_zip_path_compare(const char *path1, const char *path2, uint8_t ignore_case) {
+    do {
+        if ((*path1 == '\\' && *path2 == '/') ||
+            (*path2 == '\\' && *path1 == '/')) {
+            /* Ignore comparison of path slashes */
+        } else if (ignore_case) {
+            if (tolower(*path1) != tolower(*path2))
+                break;
+        } else if (*path1 != *path2) {
+            break;
+        }
+
+        path1 += 1;
+        path2 += 1;
+    } while (*path1 != 0 && *path2 != 0);
+
+    if (ignore_case)
+        return (int32_t)(tolower(*path1) - tolower(*path2));
+
+    return (int32_t)(*path1 - *path2);
+}
+
+/***************************************************************************/
+
+const char* mz_zip_get_compression_method_string(int32_t compression_method)
+{
+    const char *method = "?";
+    switch (compression_method) {
+    case MZ_COMPRESS_METHOD_STORE:
+        method = "stored";
+        break;
+    case MZ_COMPRESS_METHOD_DEFLATE:
+        method = "deflate";
+        break;
+    case MZ_COMPRESS_METHOD_BZIP2:
+        method = "bzip2";
+        break;
+    case MZ_COMPRESS_METHOD_LZMA:
+        method = "lzma";
+        break;
+    case MZ_COMPRESS_METHOD_XZ:
+        method = "xz";
+        break;
+    case MZ_COMPRESS_METHOD_ZSTD:
+        method = "zstd";
+        break;
+    }
+    return method;
+}
+
+/***************************************************************************/
diff --git a/Externals/minizip/mz_zip.h b/Externals/minizip/mz_zip.h
new file mode 100644
index 000000000000..e3d1fbd52347
--- /dev/null
+++ b/Externals/minizip/mz_zip.h
@@ -0,0 +1,259 @@
+/* mz_zip.h -- Zip manipulation
+   part of the minizip-ng project
+
+   Copyright (C) 2010-2021 Nathan Moinvaziri
+     https://github.com/zlib-ng/minizip-ng
+   Copyright (C) 2009-2010 Mathias Svensson
+     Modifications for Zip64 support
+     http://result42.com
+   Copyright (C) 1998-2010 Gilles Vollant
+     https://www.winimage.com/zLibDll/minizip.html
+
+   This program is distributed under the terms of
the same license as zlib. + See the accompanying LICENSE file for the full text of the license. +*/ + +#ifndef MZ_ZIP_H +#define MZ_ZIP_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +typedef struct mz_zip_file_s { + uint16_t version_madeby; /* version made by */ + uint16_t version_needed; /* version needed to extract */ + uint16_t flag; /* general purpose bit flag */ + uint16_t compression_method; /* compression method */ + time_t modified_date; /* last modified date in unix time */ + time_t accessed_date; /* last accessed date in unix time */ + time_t creation_date; /* creation date in unix time */ + uint32_t crc; /* crc-32 */ + int64_t compressed_size; /* compressed size */ + int64_t uncompressed_size; /* uncompressed size */ + uint16_t filename_size; /* filename length */ + uint16_t extrafield_size; /* extra field length */ + uint16_t comment_size; /* file comment length */ + uint32_t disk_number; /* disk number start */ + int64_t disk_offset; /* relative offset of local header */ + uint16_t internal_fa; /* internal file attributes */ + uint32_t external_fa; /* external file attributes */ + + const char *filename; /* filename utf8 null-terminated string */ + const uint8_t *extrafield; /* extrafield data */ + const char *comment; /* comment utf8 null-terminated string */ + const char *linkname; /* sym-link filename utf8 null-terminated string */ + + uint16_t zip64; /* zip64 extension mode */ + uint16_t aes_version; /* winzip aes extension if not 0 */ + uint8_t aes_encryption_mode; /* winzip aes encryption mode */ + uint16_t pk_verify; /* pkware encryption verifier */ + +} mz_zip_file, mz_zip_entry; + +/***************************************************************************/ + +typedef int32_t (*mz_zip_locate_entry_cb)(void *handle, void *userdata, mz_zip_file *file_info); + +/***************************************************************************/ + +void * mz_zip_create(void **handle); +/* Create zip instance for opening */ + +void mz_zip_delete(void **handle); +/* Delete zip object */ + +int32_t mz_zip_open(void *handle, void *stream, int32_t mode); +/* Create a zip file, no delete file in zip functionality */ + +int32_t mz_zip_close(void *handle); +/* Close the zip file */ + +int32_t mz_zip_get_comment(void *handle, const char **comment); +/* Get a pointer to the global comment */ + +int32_t mz_zip_set_comment(void *handle, const char *comment); +/* Sets the global comment used for writing zip file */ + +int32_t mz_zip_get_version_madeby(void *handle, uint16_t *version_madeby); +/* Get the version made by */ + +int32_t mz_zip_set_version_madeby(void *handle, uint16_t version_madeby); +/* Sets the version made by used for writing zip file */ + +int32_t mz_zip_set_recover(void *handle, uint8_t recover); +/* Sets the ability to recover the central dir by reading local file headers */ + +int32_t mz_zip_set_data_descriptor(void *handle, uint8_t data_descriptor); +/* Sets the use of data descriptor flag when writing zip entries */ + +int32_t mz_zip_get_stream(void *handle, void **stream); +/* Get a pointer to the stream used to open */ + +int32_t mz_zip_set_cd_stream(void *handle, int64_t cd_start_pos, void *cd_stream); +/* Sets the stream to use for reading the central dir */ + +int32_t mz_zip_get_cd_mem_stream(void *handle, void **cd_mem_stream); +/* Get a pointer to the stream used to store the central dir in memory */ + +int32_t mz_zip_set_number_entry(void *handle, uint64_t number_entry); +/* 
Sets the total number of entries */ + +int32_t mz_zip_get_number_entry(void *handle, uint64_t *number_entry); +/* Get the total number of entries */ + +int32_t mz_zip_set_disk_number_with_cd(void *handle, uint32_t disk_number_with_cd); +/* Sets the disk number containing the central directory record */ + +int32_t mz_zip_get_disk_number_with_cd(void *handle, uint32_t *disk_number_with_cd); +/* Get the disk number containing the central directory record */ + +/***************************************************************************/ + +int32_t mz_zip_entry_is_open(void *handle); +/* Check to see if entry is open for read/write */ + +int32_t mz_zip_entry_read_open(void *handle, uint8_t raw, const char *password); +/* Open for reading the current file in the zip file */ + +int32_t mz_zip_entry_read(void *handle, void *buf, int32_t len); +/* Read bytes from the current file in the zip file */ + +int32_t mz_zip_entry_read_close(void *handle, uint32_t *crc32, int64_t *compressed_size, + int64_t *uncompressed_size); +/* Close the current file for reading and get data descriptor values */ + +int32_t mz_zip_entry_write_open(void *handle, const mz_zip_file *file_info, + int16_t compress_level, uint8_t raw, const char *password); +/* Open for writing the current file in the zip file */ + +int32_t mz_zip_entry_write(void *handle, const void *buf, int32_t len); +/* Write bytes from the current file in the zip file */ + +int32_t mz_zip_entry_write_close(void *handle, uint32_t crc32, int64_t compressed_size, + int64_t uncompressed_size); +/* Close the current file for writing and set data descriptor values */ + +int32_t mz_zip_entry_seek_local_header(void *handle); +/* Seeks to the local header for the entry */ + +int32_t mz_zip_entry_close_raw(void *handle, int64_t uncompressed_size, uint32_t crc32); +/* Close the current file in the zip file where raw is compressed data */ + +int32_t mz_zip_entry_close(void *handle); +/* Close the current file in the zip file */ + +/***************************************************************************/ + +int32_t mz_zip_entry_is_dir(void *handle); +/* Checks to see if the entry is a directory */ + +int32_t mz_zip_entry_is_symlink(void *handle); +/* Checks to see if the entry is a symbolic link */ + +int32_t mz_zip_entry_get_info(void *handle, mz_zip_file **file_info); +/* Get info about the current file, only valid while current entry is open */ + +int32_t mz_zip_entry_get_local_info(void *handle, mz_zip_file **local_file_info); +/* Get local info about the current file, only valid while current entry is being read */ + +int32_t mz_zip_entry_set_extrafield(void *handle, const uint8_t *extrafield, uint16_t extrafield_size); +/* Sets or updates the extra field for the entry to be used before writing cd */ + +int64_t mz_zip_get_entry(void *handle); +/* Return offset of the current entry in the zip file */ + +int32_t mz_zip_goto_entry(void *handle, int64_t cd_pos); +/* Go to specified entry in the zip file */ + +int32_t mz_zip_goto_first_entry(void *handle); +/* Go to the first entry in the zip file */ + +int32_t mz_zip_goto_next_entry(void *handle); +/* Go to the next entry in the zip file or MZ_END_OF_LIST if reaching the end */ + +int32_t mz_zip_locate_entry(void *handle, const char *filename, uint8_t ignore_case); +/* Locate the file with the specified name in the zip file or MZ_END_LIST if not found */ + +int32_t mz_zip_locate_first_entry(void *handle, void *userdata, mz_zip_locate_entry_cb cb); +/* Locate the first matching entry based on a match callback */ 
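/*
   Illustrative sketch only; this is not part of the vendored minizip-ng header above or of the
   patch itself. It shows one plausible way the low-level enumeration functions declared here
   (mz_zip_create/mz_zip_open, mz_zip_goto_first_entry/mz_zip_goto_next_entry,
   mz_zip_entry_get_info) fit together to list the entries of an archive. The function name
   list_zip_entries, the printf formatting, and the choice of an OS file stream are assumptions
   made for the example, not anything this header mandates.
*/
#include <inttypes.h>
#include <stdio.h>

#include "mz.h"
#include "mz_strm_os.h"
#include "mz_zip.h"

static int32_t list_zip_entries(const char *zip_path) {
    void *stream = NULL;
    void *zip_handle = NULL;
    mz_zip_file *file_info = NULL;
    int32_t err = MZ_OK;

    /* Open the archive through an OS file stream, as the reader in mz_zip_rw.c does internally */
    mz_stream_os_create(&stream);
    err = mz_stream_os_open(stream, zip_path, MZ_OPEN_MODE_READ);
    if (err != MZ_OK) {
        mz_stream_os_delete(&stream);
        return err;
    }

    mz_zip_create(&zip_handle);
    err = mz_zip_open(zip_handle, stream, MZ_OPEN_MODE_READ);

    /* Walk the central directory; mz_zip_goto_next_entry returns MZ_END_OF_LIST at the end */
    if (err == MZ_OK)
        err = mz_zip_goto_first_entry(zip_handle);
    while (err == MZ_OK) {
        if (mz_zip_entry_get_info(zip_handle, &file_info) == MZ_OK)
            printf("%s (%s, %" PRId64 " bytes)\n", file_info->filename,
                   mz_zip_get_compression_method_string(file_info->compression_method),
                   file_info->uncompressed_size);
        err = mz_zip_goto_next_entry(zip_handle);
    }
    if (err == MZ_END_OF_LIST)
        err = MZ_OK;

    mz_zip_close(zip_handle);
    mz_zip_delete(&zip_handle);
    mz_stream_os_close(stream);
    mz_stream_os_delete(&stream);
    return err;
}

/* The higher-level mz_zip_reader_* API added in mz_zip_rw.c wraps this same
   goto_first_entry/entry_get_info/goto_next_entry pattern behind one handle. */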
+ +int32_t mz_zip_locate_next_entry(void *handle, void *userdata, mz_zip_locate_entry_cb cb); +/* Locate the next matching entry based on a match callback */ + +/***************************************************************************/ + +int32_t mz_zip_attrib_is_dir(uint32_t attrib, int32_t version_madeby); +/* Checks to see if the attribute is a directory based on platform */ + +int32_t mz_zip_attrib_is_symlink(uint32_t attrib, int32_t version_madeby); +/* Checks to see if the attribute is a symbolic link based on platform */ + +int32_t mz_zip_attrib_convert(uint8_t src_sys, uint32_t src_attrib, uint8_t target_sys, + uint32_t *target_attrib); +/* Converts file attributes from one host system to another */ + +int32_t mz_zip_attrib_posix_to_win32(uint32_t posix_attrib, uint32_t *win32_attrib); +/* Converts posix file attributes to win32 file attributes */ + +int32_t mz_zip_attrib_win32_to_posix(uint32_t win32_attrib, uint32_t *posix_attrib); +/* Converts win32 file attributes to posix file attributes */ + +/***************************************************************************/ + +int32_t mz_zip_extrafield_find(void *stream, uint16_t type, int32_t max_seek, uint16_t *length); +/* Seeks to extra field by its type and returns its length */ + +int32_t mz_zip_extrafield_contains(const uint8_t *extrafield, int32_t extrafield_size, + uint16_t type, uint16_t *length); +/* Gets whether an extrafield exists and its size */ + +int32_t mz_zip_extrafield_read(void *stream, uint16_t *type, uint16_t *length); +/* Reads an extrafield header from a stream */ + +int32_t mz_zip_extrafield_write(void *stream, uint16_t type, uint16_t length); +/* Writes an extrafield header to a stream */ + +/***************************************************************************/ + +int32_t mz_zip_dosdate_to_tm(uint64_t dos_date, struct tm *ptm); +/* Convert dos date/time format to struct tm */ + +time_t mz_zip_dosdate_to_time_t(uint64_t dos_date); +/* Convert dos date/time format to time_t */ + +int32_t mz_zip_time_t_to_tm(time_t unix_time, struct tm *ptm); +/* Convert time_t to time struct */ + +uint32_t mz_zip_time_t_to_dos_date(time_t unix_time); +/* Convert time_t to dos date/time format */ + +uint32_t mz_zip_tm_to_dosdate(const struct tm *ptm); +/* Convert struct tm to dos date/time format */ + +int32_t mz_zip_ntfs_to_unix_time(uint64_t ntfs_time, time_t *unix_time); +/* Convert ntfs time to unix time */ + +int32_t mz_zip_unix_to_ntfs_time(time_t unix_time, uint64_t *ntfs_time); +/* Convert unix time to ntfs time */ + +/***************************************************************************/ + +int32_t mz_zip_path_compare(const char *path1, const char *path2, uint8_t ignore_case); +/* Compare two paths without regard to slashes */ + +/***************************************************************************/ + +const +char* mz_zip_get_compression_method_string(int32_t compression_method); +/* Gets a string representing the compression method */ + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif /* _ZIP_H */ diff --git a/Externals/minizip/mz_zip_rw.c b/Externals/minizip/mz_zip_rw.c new file mode 100644 index 000000000000..464e55905700 --- /dev/null +++ b/Externals/minizip/mz_zip_rw.c @@ -0,0 +1,1943 @@ +/* mz_zip_rw.c -- Zip reader/writer + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. 
+ See the accompanying LICENSE file for the full text of the license. +*/ + +#include "mz.h" +#include "mz_crypt.h" +#include "mz_os.h" +#include "mz_strm.h" +#include "mz_strm_buf.h" +#include "mz_strm_mem.h" +#include "mz_strm_os.h" +#include "mz_strm_split.h" +#include "mz_strm_wzaes.h" +#include "mz_zip.h" + +#include "mz_zip_rw.h" + +/***************************************************************************/ + +#define MZ_DEFAULT_PROGRESS_INTERVAL (1000u) + +#define MZ_ZIP_CD_FILENAME ("__cdcd__") + +/***************************************************************************/ + +typedef struct mz_zip_reader_s { + void *zip_handle; + void *file_stream; + void *buffered_stream; + void *split_stream; + void *mem_stream; + void *hash; + uint16_t hash_algorithm; + uint16_t hash_digest_size; + mz_zip_file *file_info; + const char *pattern; + uint8_t pattern_ignore_case; + const char *password; + void *overwrite_userdata; + mz_zip_reader_overwrite_cb + overwrite_cb; + void *password_userdata; + mz_zip_reader_password_cb + password_cb; + void *progress_userdata; + mz_zip_reader_progress_cb + progress_cb; + uint32_t progress_cb_interval_ms; + void *entry_userdata; + mz_zip_reader_entry_cb + entry_cb; + uint8_t raw; + uint8_t buffer[UINT16_MAX]; + int32_t encoding; + uint8_t sign_required; + uint8_t cd_verified; + uint8_t cd_zipped; + uint8_t entry_verified; + uint8_t recover; +} mz_zip_reader; + +/***************************************************************************/ + +int32_t mz_zip_reader_is_open(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (reader == NULL) + return MZ_PARAM_ERROR; + if (reader->zip_handle == NULL) + return MZ_PARAM_ERROR; + return MZ_OK; +} + +int32_t mz_zip_reader_open(void *handle, void *stream) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + reader->cd_verified = 0; + reader->cd_zipped = 0; + + mz_zip_create(&reader->zip_handle); + mz_zip_set_recover(reader->zip_handle, reader->recover); + + err = mz_zip_open(reader->zip_handle, stream, MZ_OPEN_MODE_READ); + + if (err != MZ_OK) { + mz_zip_reader_close(handle); + return err; + } + + mz_zip_reader_unzip_cd(reader); + return MZ_OK; +} + +int32_t mz_zip_reader_open_file(void *handle, const char *path) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + + mz_zip_reader_close(handle); + + mz_stream_os_create(&reader->file_stream); + mz_stream_buffered_create(&reader->buffered_stream); + mz_stream_split_create(&reader->split_stream); + + mz_stream_set_base(reader->buffered_stream, reader->file_stream); + mz_stream_set_base(reader->split_stream, reader->buffered_stream); + + err = mz_stream_open(reader->split_stream, path, MZ_OPEN_MODE_READ); + if (err == MZ_OK) + err = mz_zip_reader_open(handle, reader->split_stream); + return err; +} + +int32_t mz_zip_reader_open_file_in_memory(void *handle, const char *path) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + void *file_stream = NULL; + int64_t file_size = 0; + int32_t err = 0; + + + mz_zip_reader_close(handle); + + mz_stream_os_create(&file_stream); + + err = mz_stream_os_open(file_stream, path, MZ_OPEN_MODE_READ); + + if (err != MZ_OK) { + mz_stream_os_delete(&file_stream); + mz_zip_reader_close(handle); + return err; + } + + mz_stream_os_seek(file_stream, 0, MZ_SEEK_END); + file_size = mz_stream_os_tell(file_stream); + mz_stream_os_seek(file_stream, 0, MZ_SEEK_SET); + + if ((file_size <= 0) || (file_size > UINT32_MAX)) { + /* Memory size is too large or too small */ + + 
mz_stream_os_close(file_stream); + mz_stream_os_delete(&file_stream); + mz_zip_reader_close(handle); + return MZ_MEM_ERROR; + } + + mz_stream_mem_create(&reader->mem_stream); + mz_stream_mem_set_grow_size(reader->mem_stream, (int32_t)file_size); + mz_stream_mem_open(reader->mem_stream, NULL, MZ_OPEN_MODE_CREATE); + + err = mz_stream_copy(reader->mem_stream, file_stream, (int32_t)file_size); + + mz_stream_os_close(file_stream); + mz_stream_os_delete(&file_stream); + + if (err == MZ_OK) + err = mz_zip_reader_open(handle, reader->mem_stream); + if (err != MZ_OK) + mz_zip_reader_close(handle); + + return err; +} + +int32_t mz_zip_reader_open_buffer(void *handle, uint8_t *buf, int32_t len, uint8_t copy) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + mz_zip_reader_close(handle); + + mz_stream_mem_create(&reader->mem_stream); + + if (copy) { + mz_stream_mem_set_grow_size(reader->mem_stream, len); + mz_stream_mem_open(reader->mem_stream, NULL, MZ_OPEN_MODE_CREATE); + mz_stream_mem_write(reader->mem_stream, buf, len); + mz_stream_mem_seek(reader->mem_stream, 0, MZ_SEEK_SET); + } else { + mz_stream_mem_open(reader->mem_stream, NULL, MZ_OPEN_MODE_READ); + mz_stream_mem_set_buffer(reader->mem_stream, buf, len); + } + + if (err == MZ_OK) + err = mz_zip_reader_open(handle, reader->mem_stream); + + return err; +} + +int32_t mz_zip_reader_close(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + if (reader->zip_handle != NULL) { + err = mz_zip_close(reader->zip_handle); + mz_zip_delete(&reader->zip_handle); + } + + if (reader->split_stream != NULL) { + mz_stream_split_close(reader->split_stream); + mz_stream_split_delete(&reader->split_stream); + } + + if (reader->buffered_stream != NULL) + mz_stream_buffered_delete(&reader->buffered_stream); + + if (reader->file_stream != NULL) + mz_stream_os_delete(&reader->file_stream); + + if (reader->mem_stream != NULL) { + mz_stream_mem_close(reader->mem_stream); + mz_stream_mem_delete(&reader->mem_stream); + } + + return err; +} + +/***************************************************************************/ + +int32_t mz_zip_reader_unzip_cd(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + mz_zip_file *cd_info = NULL; + void *cd_mem_stream = NULL; + void *new_cd_stream = NULL; + void *file_extra_stream = NULL; + uint64_t number_entry = 0; + int32_t err = MZ_OK; + + + err = mz_zip_reader_goto_first_entry(handle); + if (err != MZ_OK) + return err; + err = mz_zip_reader_entry_get_info(handle, &cd_info); + if (err != MZ_OK) + return err; + + if (strcmp(cd_info->filename, MZ_ZIP_CD_FILENAME) != 0) + return mz_zip_reader_goto_first_entry(handle); + + err = mz_zip_reader_entry_open(handle); + if (err != MZ_OK) + return err; + + mz_stream_mem_create(&file_extra_stream); + mz_stream_mem_set_buffer(file_extra_stream, (void *)cd_info->extrafield, cd_info->extrafield_size); + + err = mz_zip_extrafield_find(file_extra_stream, MZ_ZIP_EXTENSION_CDCD, INT32_MAX, NULL); + if (err == MZ_OK) + err = mz_stream_read_uint64(file_extra_stream, &number_entry); + + mz_stream_mem_delete(&file_extra_stream); + + if (err != MZ_OK) + return err; + + mz_zip_get_cd_mem_stream(reader->zip_handle, &cd_mem_stream); + if (mz_stream_mem_is_open(cd_mem_stream) != MZ_OK) + mz_stream_mem_open(cd_mem_stream, NULL, MZ_OPEN_MODE_CREATE); + + err = mz_stream_seek(cd_mem_stream, 0, MZ_SEEK_SET); + if (err == MZ_OK) + err = mz_stream_copy_stream(cd_mem_stream, NULL, handle, mz_zip_reader_entry_read, + 
(int32_t)cd_info->uncompressed_size); + + if (err == MZ_OK) { + reader->cd_zipped = 1; + + mz_zip_set_cd_stream(reader->zip_handle, 0, cd_mem_stream); + mz_zip_set_number_entry(reader->zip_handle, number_entry); + + err = mz_zip_reader_goto_first_entry(handle); + } + + reader->cd_verified = reader->entry_verified; + + mz_stream_mem_delete(&new_cd_stream); + return err; +} + +/***************************************************************************/ + +static int32_t mz_zip_reader_locate_entry_cb(void *handle, void *userdata, mz_zip_file *file_info) { + mz_zip_reader *reader = (mz_zip_reader *)userdata; + int32_t result = 0; + MZ_UNUSED(handle); + result = mz_path_compare_wc(file_info->filename, reader->pattern, reader->pattern_ignore_case); + return result; +} + +int32_t mz_zip_reader_goto_first_entry(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + if (mz_zip_reader_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + + if (mz_zip_entry_is_open(reader->zip_handle) == MZ_OK) + mz_zip_reader_entry_close(handle); + + if (reader->pattern == NULL) + err = mz_zip_goto_first_entry(reader->zip_handle); + else + err = mz_zip_locate_first_entry(reader->zip_handle, reader, mz_zip_reader_locate_entry_cb); + + reader->file_info = NULL; + if (err == MZ_OK) + err = mz_zip_entry_get_info(reader->zip_handle, &reader->file_info); + + return err; +} + +int32_t mz_zip_reader_goto_next_entry(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + if (mz_zip_reader_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + + if (mz_zip_entry_is_open(reader->zip_handle) == MZ_OK) + mz_zip_reader_entry_close(handle); + + if (reader->pattern == NULL) + err = mz_zip_goto_next_entry(reader->zip_handle); + else + err = mz_zip_locate_next_entry(reader->zip_handle, reader, mz_zip_reader_locate_entry_cb); + + reader->file_info = NULL; + if (err == MZ_OK) + err = mz_zip_entry_get_info(reader->zip_handle, &reader->file_info); + + return err; +} + +int32_t mz_zip_reader_locate_entry(void *handle, const char *filename, uint8_t ignore_case) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + + if (mz_zip_entry_is_open(reader->zip_handle) == MZ_OK) + mz_zip_reader_entry_close(handle); + + err = mz_zip_locate_entry(reader->zip_handle, filename, ignore_case); + + reader->file_info = NULL; + if (err == MZ_OK) + err = mz_zip_entry_get_info(reader->zip_handle, &reader->file_info); + + return err; +} + +/***************************************************************************/ + +int32_t mz_zip_reader_entry_open(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + const char *password = NULL; + char password_buf[120]; + + + reader->entry_verified = 0; + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (reader->file_info == NULL) + return MZ_PARAM_ERROR; + + /* If the entry isn't open for reading, open it */ + if (mz_zip_entry_is_open(reader->zip_handle) == MZ_OK) + return MZ_OK; + + password = reader->password; + + /* Check if we need a password and ask for it if we need to */ + if ((reader->file_info->flag & MZ_ZIP_FLAG_ENCRYPTED) && (password == NULL) && + (reader->password_cb != NULL)) { + reader->password_cb(handle, reader->password_userdata, reader->file_info, + password_buf, sizeof(password_buf)); + + password = password_buf; + } + + err = mz_zip_entry_read_open(reader->zip_handle, reader->raw, password); +#ifndef MZ_ZIP_NO_CRYPTO + if (err != MZ_OK) + 
return err; + + if (mz_zip_reader_entry_get_first_hash(handle, &reader->hash_algorithm, &reader->hash_digest_size) == MZ_OK) { + mz_crypt_sha_create(&reader->hash); + if (reader->hash_algorithm == MZ_HASH_SHA1) + mz_crypt_sha_set_algorithm(reader->hash, MZ_HASH_SHA1); + else if (reader->hash_algorithm == MZ_HASH_SHA256) + mz_crypt_sha_set_algorithm(reader->hash, MZ_HASH_SHA256); + else + err = MZ_SUPPORT_ERROR; + + if (err == MZ_OK) + mz_crypt_sha_begin(reader->hash); +#ifdef MZ_ZIP_SIGNING + if (err == MZ_OK) { + if (mz_zip_reader_entry_has_sign(handle) == MZ_OK) { + err = mz_zip_reader_entry_sign_verify(handle); + if (err == MZ_OK) + reader->entry_verified = 1; + } else if (reader->sign_required && !reader->cd_verified) + err = MZ_SIGN_ERROR; + } +#endif + } else if (reader->sign_required && !reader->cd_verified) + err = MZ_SIGN_ERROR; +#endif + + return err; +} + +int32_t mz_zip_reader_entry_close(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + int32_t err_close = MZ_OK; +#ifndef MZ_ZIP_NO_CRYPTO + int32_t err_hash = MZ_OK; + uint8_t computed_hash[MZ_HASH_MAX_SIZE]; + uint8_t expected_hash[MZ_HASH_MAX_SIZE]; + + if (reader->hash != NULL) { + mz_crypt_sha_end(reader->hash, computed_hash, sizeof(computed_hash)); + mz_crypt_sha_delete(&reader->hash); + + err_hash = mz_zip_reader_entry_get_hash(handle, reader->hash_algorithm, expected_hash, + reader->hash_digest_size); + + if (err_hash == MZ_OK) { + /* Verify expected hash against computed hash */ + if (memcmp(computed_hash, expected_hash, reader->hash_digest_size) != 0) + err = MZ_CRC_ERROR; + } + } +#endif + + err_close = mz_zip_entry_close(reader->zip_handle); + if (err == MZ_OK) + err = err_close; + return err; +} + +int32_t mz_zip_reader_entry_read(void *handle, void *buf, int32_t len) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t read = 0; + read = mz_zip_entry_read(reader->zip_handle, buf, len); +#ifndef MZ_ZIP_NO_CRYPTO + if ((read > 0) && (reader->hash != NULL)) + mz_crypt_sha_update(reader->hash, buf, read); +#endif + return read; +} + +int32_t mz_zip_reader_entry_has_sign(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + + if (reader == NULL || mz_zip_entry_is_open(reader->zip_handle) != MZ_OK) + return MZ_PARAM_ERROR; + + return mz_zip_extrafield_contains(reader->file_info->extrafield, + reader->file_info->extrafield_size, MZ_ZIP_EXTENSION_SIGN, NULL); +} + +#if !defined(MZ_ZIP_NO_CRYPTO) && defined(MZ_ZIP_SIGNING) +int32_t mz_zip_reader_entry_sign_verify(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + void *file_extra_stream = NULL; + int32_t err = MZ_OK; + uint8_t *signature = NULL; + uint16_t signature_size = 0; + uint8_t hash[MZ_HASH_MAX_SIZE]; + + if (reader == NULL || mz_zip_entry_is_open(reader->zip_handle) != MZ_OK) + return MZ_PARAM_ERROR; + + mz_stream_mem_create(&file_extra_stream); + mz_stream_mem_set_buffer(file_extra_stream, (void *)reader->file_info->extrafield, + reader->file_info->extrafield_size); + + err = mz_zip_extrafield_find(file_extra_stream, MZ_ZIP_EXTENSION_SIGN, INT32_MAX, &signature_size); + if ((err == MZ_OK) && (signature_size > 0)) { + signature = (uint8_t *)MZ_ALLOC(signature_size); + if (mz_stream_read(file_extra_stream, signature, signature_size) != signature_size) + err = MZ_READ_ERROR; + } + + mz_stream_mem_delete(&file_extra_stream); + + if (err == MZ_OK) { + /* Get most secure hash to verify signature against */ + err = mz_zip_reader_entry_get_hash(handle, reader->hash_algorithm, hash, 
reader->hash_digest_size); + } + + if (err == MZ_OK) { + /* Verify the pkcs signature */ + err = mz_crypt_sign_verify(hash, reader->hash_digest_size, signature, signature_size); + } + + if (signature != NULL) + MZ_FREE(signature); + + return err; +} +#endif + +int32_t mz_zip_reader_entry_get_hash(void *handle, uint16_t algorithm, uint8_t *digest, int32_t digest_size) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + void *file_extra_stream = NULL; + int32_t err = MZ_OK; + int32_t return_err = MZ_EXIST_ERROR; + uint16_t cur_algorithm = 0; + uint16_t cur_digest_size = 0; + + mz_stream_mem_create(&file_extra_stream); + mz_stream_mem_set_buffer(file_extra_stream, (void *)reader->file_info->extrafield, + reader->file_info->extrafield_size); + + do { + err = mz_zip_extrafield_find(file_extra_stream, MZ_ZIP_EXTENSION_HASH, INT32_MAX, NULL); + if (err != MZ_OK) + break; + + err = mz_stream_read_uint16(file_extra_stream, &cur_algorithm); + if (err == MZ_OK) + err = mz_stream_read_uint16(file_extra_stream, &cur_digest_size); + if ((err == MZ_OK) && (cur_algorithm == algorithm) && (cur_digest_size <= digest_size) && + (cur_digest_size <= MZ_HASH_MAX_SIZE)) { + /* Read hash digest */ + if (mz_stream_read(file_extra_stream, digest, digest_size) == cur_digest_size) + return_err = MZ_OK; + break; + } else { + err = mz_stream_seek(file_extra_stream, cur_digest_size, MZ_SEEK_CUR); + } + } while (err == MZ_OK); + + mz_stream_mem_delete(&file_extra_stream); + + return return_err; +} + +int32_t mz_zip_reader_entry_get_first_hash(void *handle, uint16_t *algorithm, uint16_t *digest_size) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + void *file_extra_stream = NULL; + int32_t err = MZ_OK; + uint16_t cur_algorithm = 0; + uint16_t cur_digest_size = 0; + + if (reader == NULL || algorithm == NULL) + return MZ_PARAM_ERROR; + + mz_stream_mem_create(&file_extra_stream); + mz_stream_mem_set_buffer(file_extra_stream, (void *)reader->file_info->extrafield, + reader->file_info->extrafield_size); + + err = mz_zip_extrafield_find(file_extra_stream, MZ_ZIP_EXTENSION_HASH, INT32_MAX, NULL); + if (err == MZ_OK) + err = mz_stream_read_uint16(file_extra_stream, &cur_algorithm); + if (err == MZ_OK) + err = mz_stream_read_uint16(file_extra_stream, &cur_digest_size); + + if (algorithm != NULL) + *algorithm = cur_algorithm; + if (digest_size != NULL) + *digest_size = cur_digest_size; + + mz_stream_mem_delete(&file_extra_stream); + + return err; +} + +int32_t mz_zip_reader_entry_get_info(void *handle, mz_zip_file **file_info) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + if (file_info == NULL || mz_zip_reader_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + *file_info = reader->file_info; + if (*file_info == NULL) + return MZ_EXIST_ERROR; + return err; +} + +int32_t mz_zip_reader_entry_is_dir(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (mz_zip_reader_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + return mz_zip_entry_is_dir(reader->zip_handle); +} + +int32_t mz_zip_reader_entry_save_process(void *handle, void *stream, mz_stream_write_cb write_cb) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + int32_t read = 0; + int32_t written = 0; + + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (reader->file_info == NULL) + return MZ_PARAM_ERROR; + if (write_cb == NULL) + return MZ_PARAM_ERROR; + + /* If the entry isn't open for reading, open it */ + if (mz_zip_entry_is_open(reader->zip_handle) != MZ_OK) + 
err = mz_zip_reader_entry_open(handle); + + if (err != MZ_OK) + return err; + + /* Unzip entry in zip file */ + read = mz_zip_reader_entry_read(handle, reader->buffer, sizeof(reader->buffer)); + + if (read == 0) { + /* If we are done close the entry */ + err = mz_zip_reader_entry_close(handle); + if (err != MZ_OK) + return err; + + return MZ_END_OF_STREAM; + } + + if (read > 0) { + /* Write the data to the specified stream */ + written = write_cb(stream, reader->buffer, read); + if (written != read) + return MZ_WRITE_ERROR; + } + + return read; +} + +int32_t mz_zip_reader_entry_save(void *handle, void *stream, mz_stream_write_cb write_cb) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + uint64_t current_time = 0; + uint64_t update_time = 0; + int64_t current_pos = 0; + int64_t update_pos = 0; + int32_t err = MZ_OK; + int32_t written = 0; + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (reader->file_info == NULL) + return MZ_PARAM_ERROR; + + /* Update the progress at the beginning */ + if (reader->progress_cb != NULL) + reader->progress_cb(handle, reader->progress_userdata, reader->file_info, current_pos); + + /* Write data to stream until done */ + while (err == MZ_OK) { + written = mz_zip_reader_entry_save_process(handle, stream, write_cb); + if (written == MZ_END_OF_STREAM) + break; + if (written > 0) + current_pos += written; + if (written < 0) + err = written; + + /* Update progress if enough time have passed */ + current_time = mz_os_ms_time(); + if ((current_time - update_time) > reader->progress_cb_interval_ms) { + if (reader->progress_cb != NULL) + reader->progress_cb(handle, reader->progress_userdata, reader->file_info, current_pos); + + update_pos = current_pos; + update_time = current_time; + } + } + + /* Update the progress at the end */ + if (reader->progress_cb != NULL && update_pos != current_pos) + reader->progress_cb(handle, reader->progress_userdata, reader->file_info, current_pos); + + return err; +} + +int32_t mz_zip_reader_entry_save_file(void *handle, const char *path) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + void *stream = NULL; + uint32_t target_attrib = 0; + int32_t err_attrib = 0; + int32_t err = MZ_OK; + int32_t err_cb = MZ_OK; + char pathwfs[512]; + char directory[512]; + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (reader->file_info == NULL || path == NULL) + return MZ_PARAM_ERROR; + + /* Convert to forward slashes for unix which doesn't like backslashes */ + strncpy(pathwfs, path, sizeof(pathwfs) - 1); + pathwfs[sizeof(pathwfs) - 1] = 0; + mz_path_convert_slashes(pathwfs, MZ_PATH_SLASH_UNIX); + + if (reader->entry_cb != NULL) + reader->entry_cb(handle, reader->entry_userdata, reader->file_info, pathwfs); + + strncpy(directory, pathwfs, sizeof(directory) - 1); + directory[sizeof(directory) - 1] = 0; + mz_path_remove_filename(directory); + + /* If it is a directory entry then create a directory instead of writing file */ + if ((mz_zip_entry_is_dir(reader->zip_handle) == MZ_OK) && + (mz_zip_entry_is_symlink(reader->zip_handle) != MZ_OK)) { + err = mz_dir_make(directory); + return err; + } + + /* Check if file exists and ask if we want to overwrite */ + if ((mz_os_file_exists(pathwfs) == MZ_OK) && (reader->overwrite_cb != NULL)) { + err_cb = reader->overwrite_cb(handle, reader->overwrite_userdata, reader->file_info, pathwfs); + if (err_cb != MZ_OK) + return err; + /* We want to overwrite the file so we delete the existing one */ + mz_os_unlink(pathwfs); + } + + /* If symbolic link 
then properly construct destination path and link path */ + if (mz_zip_entry_is_symlink(reader->zip_handle) == MZ_OK) { + mz_path_remove_slash(pathwfs); + mz_path_remove_filename(directory); + } + + /* Create the output directory if it doesn't already exist */ + if (mz_os_is_dir(directory) != MZ_OK) { + err = mz_dir_make(directory); + if (err != MZ_OK) + return err; + } + + /* If it is a symbolic link then create symbolic link instead of writing file */ + if (mz_zip_entry_is_symlink(reader->zip_handle) == MZ_OK) { + mz_os_make_symlink(pathwfs, reader->file_info->linkname); + /* Don't check return value because we aren't validating symbolic link target */ + return err; + } + + /* Create the file on disk so we can save to it */ + mz_stream_os_create(&stream); + err = mz_stream_os_open(stream, pathwfs, MZ_OPEN_MODE_CREATE); + + if (err == MZ_OK) + err = mz_zip_reader_entry_save(handle, stream, mz_stream_write); + + mz_stream_close(stream); + mz_stream_delete(&stream); + + if (err == MZ_OK) { + /* Set the time of the file that has been created */ + mz_os_set_file_date(pathwfs, reader->file_info->modified_date, + reader->file_info->accessed_date, reader->file_info->creation_date); + } + + if (err == MZ_OK) { + /* Set file attributes for the correct system */ + err_attrib = mz_zip_attrib_convert(MZ_HOST_SYSTEM(reader->file_info->version_madeby), + reader->file_info->external_fa, MZ_VERSION_MADEBY_HOST_SYSTEM, &target_attrib); + + if (err_attrib == MZ_OK) + mz_os_set_file_attribs(pathwfs, target_attrib); + } + + return err; +} + +int32_t mz_zip_reader_entry_save_buffer(void *handle, void *buf, int32_t len) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + void *mem_stream = NULL; + int32_t err = MZ_OK; + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (reader->file_info == NULL) + return MZ_PARAM_ERROR; + if (reader->file_info->uncompressed_size > INT32_MAX) + return MZ_PARAM_ERROR; + if (len != (int32_t)reader->file_info->uncompressed_size) + return MZ_BUF_ERROR; + + /* Create a memory stream backed by our buffer and save to it */ + mz_stream_mem_create(&mem_stream); + mz_stream_mem_set_buffer(mem_stream, buf, len); + + err = mz_stream_mem_open(mem_stream, NULL, MZ_OPEN_MODE_READ); + if (err == MZ_OK) + err = mz_zip_reader_entry_save(handle, mem_stream, mz_stream_mem_write); + + mz_stream_mem_delete(&mem_stream); + return err; +} + +int32_t mz_zip_reader_entry_save_buffer_length(void *handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (reader->file_info == NULL) + return MZ_PARAM_ERROR; + if (reader->file_info->uncompressed_size > INT32_MAX) + return MZ_PARAM_ERROR; + + /* Get the maximum size required for the save buffer */ + return (int32_t)reader->file_info->uncompressed_size; +} + +/***************************************************************************/ + +int32_t mz_zip_reader_save_all(void *handle, const char *destination_dir) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + int32_t err = MZ_OK; + uint8_t *utf8_string = NULL; + char path[512]; + char utf8_name[256]; + char resolved_name[256]; + + err = mz_zip_reader_goto_first_entry(handle); + + if (err == MZ_END_OF_LIST) + return err; + + while (err == MZ_OK) { + /* Construct output path */ + path[0] = 0; + + strncpy(utf8_name, reader->file_info->filename, sizeof(utf8_name) - 1); + utf8_name[sizeof(utf8_name) - 1] = 0; + + if ((reader->encoding > 0) && (reader->file_info->flag & MZ_ZIP_FLAG_UTF8) == 0) { + 
utf8_string = mz_os_utf8_string_create(reader->file_info->filename, reader->encoding); + if (utf8_string) { + strncpy(utf8_name, (char *)utf8_string, sizeof(utf8_name) - 1); + utf8_name[sizeof(utf8_name) - 1] = 0; + mz_os_utf8_string_delete(&utf8_string); + } + } + + err = mz_path_resolve(utf8_name, resolved_name, sizeof(resolved_name)); + if (err != MZ_OK) + break; + + if (destination_dir != NULL) + mz_path_combine(path, destination_dir, sizeof(path)); + + mz_path_combine(path, resolved_name, sizeof(path)); + + /* Save file to disk */ + err = mz_zip_reader_entry_save_file(handle, path); + + if (err == MZ_OK) + err = mz_zip_reader_goto_next_entry(handle); + } + + if (err == MZ_END_OF_LIST) + return MZ_OK; + + return err; +} + +/***************************************************************************/ + +void mz_zip_reader_set_pattern(void *handle, const char *pattern, uint8_t ignore_case) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->pattern = pattern; + reader->pattern_ignore_case = ignore_case; +} + +void mz_zip_reader_set_password(void *handle, const char *password) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->password = password; +} + +void mz_zip_reader_set_raw(void *handle, uint8_t raw) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->raw = raw; +} + +int32_t mz_zip_reader_get_raw(void *handle, uint8_t *raw) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (raw == NULL) + return MZ_PARAM_ERROR; + *raw = reader->raw; + return MZ_OK; +} + +int32_t mz_zip_reader_get_zip_cd(void *handle, uint8_t *zip_cd) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (zip_cd == NULL) + return MZ_PARAM_ERROR; + *zip_cd = reader->cd_zipped; + return MZ_OK; +} + +int32_t mz_zip_reader_get_comment(void *handle, const char **comment) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (comment == NULL) + return MZ_PARAM_ERROR; + return mz_zip_get_comment(reader->zip_handle, comment); +} + +int32_t mz_zip_reader_set_recover(void *handle, uint8_t recover) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (reader == NULL) + return MZ_PARAM_ERROR; + reader->recover = recover; + return MZ_OK; +} + +void mz_zip_reader_set_encoding(void *handle, int32_t encoding) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->encoding = encoding; +} + +void mz_zip_reader_set_sign_required(void *handle, uint8_t sign_required) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->sign_required = sign_required; +} + +void mz_zip_reader_set_overwrite_cb(void *handle, void *userdata, mz_zip_reader_overwrite_cb cb) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->overwrite_cb = cb; + reader->overwrite_userdata = userdata; +} + +void mz_zip_reader_set_password_cb(void *handle, void *userdata, mz_zip_reader_password_cb cb) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->password_cb = cb; + reader->password_userdata = userdata; +} + +void mz_zip_reader_set_progress_cb(void *handle, void *userdata, mz_zip_reader_progress_cb cb) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->progress_cb = cb; + reader->progress_userdata = userdata; +} + +void mz_zip_reader_set_progress_interval(void *handle, uint32_t milliseconds) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->progress_cb_interval_ms = milliseconds; +} + +void mz_zip_reader_set_entry_cb(void *handle, void *userdata, mz_zip_reader_entry_cb cb) { + 
mz_zip_reader *reader = (mz_zip_reader *)handle; + reader->entry_cb = cb; + reader->entry_userdata = userdata; +} + +int32_t mz_zip_reader_get_zip_handle(void *handle, void **zip_handle) { + mz_zip_reader *reader = (mz_zip_reader *)handle; + if (zip_handle == NULL) + return MZ_PARAM_ERROR; + *zip_handle = reader->zip_handle; + if (*zip_handle == NULL) + return MZ_EXIST_ERROR; + return MZ_OK; +} + +/***************************************************************************/ + +void *mz_zip_reader_create(void **handle) { + mz_zip_reader *reader = NULL; + + reader = (mz_zip_reader *)MZ_ALLOC(sizeof(mz_zip_reader)); + if (reader != NULL) { + memset(reader, 0, sizeof(mz_zip_reader)); + reader->recover = 1; + reader->progress_cb_interval_ms = MZ_DEFAULT_PROGRESS_INTERVAL; + } + if (handle != NULL) + *handle = reader; + + return reader; +} + +void mz_zip_reader_delete(void **handle) { + mz_zip_reader *reader = NULL; + if (handle == NULL) + return; + reader = (mz_zip_reader *)*handle; + if (reader != NULL) { + mz_zip_reader_close(reader); + MZ_FREE(reader); + } + *handle = NULL; +} + +/***************************************************************************/ + +typedef struct mz_zip_writer_s { + void *zip_handle; + void *file_stream; + void *buffered_stream; + void *split_stream; + void *sha256; + void *mem_stream; + void *file_extra_stream; + mz_zip_file file_info; + void *overwrite_userdata; + mz_zip_writer_overwrite_cb + overwrite_cb; + void *password_userdata; + mz_zip_writer_password_cb + password_cb; + void *progress_userdata; + mz_zip_writer_progress_cb + progress_cb; + uint32_t progress_cb_interval_ms; + void *entry_userdata; + mz_zip_writer_entry_cb + entry_cb; + const char *password; + const char *comment; + uint8_t *cert_data; + int32_t cert_data_size; + const char *cert_pwd; + uint16_t compress_method; + int16_t compress_level; + uint8_t follow_links; + uint8_t store_links; + uint8_t zip_cd; + uint8_t aes; + uint8_t raw; + uint8_t buffer[UINT16_MAX]; +} mz_zip_writer; + +/***************************************************************************/ + +int32_t mz_zip_writer_zip_cd(void *handle) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + mz_zip_file cd_file; + uint64_t number_entry = 0; + int64_t cd_mem_length = 0; + int32_t err = MZ_OK; + int32_t extrafield_size = 0; + void *file_extra_stream = NULL; + void *cd_mem_stream = NULL; + + + memset(&cd_file, 0, sizeof(cd_file)); + + mz_zip_get_number_entry(writer->zip_handle, &number_entry); + mz_zip_get_cd_mem_stream(writer->zip_handle, &cd_mem_stream); + mz_stream_seek(cd_mem_stream, 0, MZ_SEEK_END); + cd_mem_length = (uint32_t)mz_stream_tell(cd_mem_stream); + mz_stream_seek(cd_mem_stream, 0, MZ_SEEK_SET); + + cd_file.filename = MZ_ZIP_CD_FILENAME; + cd_file.modified_date = time(NULL); + cd_file.version_madeby = MZ_VERSION_MADEBY; + cd_file.compression_method = writer->compress_method; + cd_file.uncompressed_size = (int32_t)cd_mem_length; + cd_file.flag = MZ_ZIP_FLAG_UTF8; + + if (writer->password != NULL) + cd_file.flag |= MZ_ZIP_FLAG_ENCRYPTED; + + mz_stream_mem_create(&file_extra_stream); + mz_stream_mem_open(file_extra_stream, NULL, MZ_OPEN_MODE_CREATE); + + mz_zip_extrafield_write(file_extra_stream, MZ_ZIP_EXTENSION_CDCD, 8); + + mz_stream_write_uint64(file_extra_stream, number_entry); + + mz_stream_mem_get_buffer(file_extra_stream, (const void **)&cd_file.extrafield); + mz_stream_mem_get_buffer_length(file_extra_stream, &extrafield_size); + cd_file.extrafield_size = (uint16_t)extrafield_size; + + err = 
mz_zip_writer_entry_open(handle, &cd_file); + if (err == MZ_OK) { + mz_stream_copy_stream(handle, mz_zip_writer_entry_write, cd_mem_stream, + NULL, (int32_t)cd_mem_length); + + mz_stream_seek(cd_mem_stream, 0, MZ_SEEK_SET); + mz_stream_mem_set_buffer_limit(cd_mem_stream, 0); + + err = mz_zip_writer_entry_close(writer); + } + + mz_stream_mem_delete(&file_extra_stream); + + return err; +} + +/***************************************************************************/ + +int32_t mz_zip_writer_is_open(void *handle) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + if (writer == NULL) + return MZ_PARAM_ERROR; + if (writer->zip_handle == NULL) + return MZ_PARAM_ERROR; + return MZ_OK; +} + +static int32_t mz_zip_writer_open_int(void *handle, void *stream, int32_t mode) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t err = MZ_OK; + + mz_zip_create(&writer->zip_handle); + err = mz_zip_open(writer->zip_handle, stream, mode); + + if (err != MZ_OK) { + mz_zip_writer_close(handle); + return err; + } + + return MZ_OK; +} + +int32_t mz_zip_writer_open(void *handle, void *stream, uint8_t append) { + int32_t mode = MZ_OPEN_MODE_WRITE; + + if (append) { + mode |= MZ_OPEN_MODE_APPEND; + } else { + mode |= MZ_OPEN_MODE_CREATE; + } + + return mz_zip_writer_open_int(handle, stream, mode); +} + +int32_t mz_zip_writer_open_file(void *handle, const char *path, int64_t disk_size, uint8_t append) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t mode = MZ_OPEN_MODE_READWRITE; + int32_t err = MZ_OK; + int32_t err_cb = 0; + char directory[320]; + + mz_zip_writer_close(handle); + + if (mz_os_file_exists(path) != MZ_OK) { + /* If the file doesn't exist, we don't append file */ + mode |= MZ_OPEN_MODE_CREATE; + + /* Create destination directory if it doesn't already exist */ + if (strchr(path, '/') != NULL || strrchr(path, '\\') != NULL) { + strncpy(directory, path, sizeof(directory)); + mz_path_remove_filename(directory); + if (mz_os_file_exists(directory) != MZ_OK) + mz_dir_make(directory); + } + } else if (append) { + mode |= MZ_OPEN_MODE_APPEND; + } else { + if (writer->overwrite_cb != NULL) + err_cb = writer->overwrite_cb(handle, writer->overwrite_userdata, path); + + if (err_cb == MZ_INTERNAL_ERROR) + return err; + + if (err_cb == MZ_OK) + mode |= MZ_OPEN_MODE_CREATE; + else + mode |= MZ_OPEN_MODE_APPEND; + } + + mz_stream_os_create(&writer->file_stream); + mz_stream_buffered_create(&writer->buffered_stream); + mz_stream_split_create(&writer->split_stream); + + mz_stream_set_base(writer->buffered_stream, writer->file_stream); + mz_stream_set_base(writer->split_stream, writer->buffered_stream); + + mz_stream_split_set_prop_int64(writer->split_stream, MZ_STREAM_PROP_DISK_SIZE, disk_size); + + err = mz_stream_open(writer->split_stream, path, mode); + if (err == MZ_OK) + err = mz_zip_writer_open_int(handle, writer->split_stream, mode); + + return err; +} + +int32_t mz_zip_writer_open_file_in_memory(void *handle, const char *path) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + void *file_stream = NULL; + int64_t file_size = 0; + int32_t err = 0; + + + mz_zip_writer_close(handle); + + mz_stream_os_create(&file_stream); + + err = mz_stream_os_open(file_stream, path, MZ_OPEN_MODE_READ); + + if (err != MZ_OK) { + mz_stream_os_delete(&file_stream); + mz_zip_writer_close(handle); + return err; + } + + mz_stream_os_seek(file_stream, 0, MZ_SEEK_END); + file_size = mz_stream_os_tell(file_stream); + mz_stream_os_seek(file_stream, 0, MZ_SEEK_SET); + + if ((file_size <= 0) || (file_size > 
UINT32_MAX)) { + /* Memory size is too large or too small */ + + mz_stream_os_close(file_stream); + mz_stream_os_delete(&file_stream); + mz_zip_writer_close(handle); + return MZ_MEM_ERROR; + } + + mz_stream_mem_create(&writer->mem_stream); + mz_stream_mem_set_grow_size(writer->mem_stream, (int32_t)file_size); + mz_stream_mem_open(writer->mem_stream, NULL, MZ_OPEN_MODE_CREATE); + + err = mz_stream_copy(writer->mem_stream, file_stream, (int32_t)file_size); + + mz_stream_os_close(file_stream); + mz_stream_os_delete(&file_stream); + + if (err == MZ_OK) + err = mz_zip_writer_open(handle, writer->mem_stream, 1); + if (err != MZ_OK) + mz_zip_writer_close(handle); + + return err; +} + +int32_t mz_zip_writer_close(void *handle) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t err = MZ_OK; + + + if (writer->zip_handle != NULL) { + mz_zip_set_version_madeby(writer->zip_handle, MZ_VERSION_MADEBY); + if (writer->comment) + mz_zip_set_comment(writer->zip_handle, writer->comment); + if (writer->zip_cd) + mz_zip_writer_zip_cd(writer); + + err = mz_zip_close(writer->zip_handle); + mz_zip_delete(&writer->zip_handle); + } + + if (writer->split_stream != NULL) { + mz_stream_split_close(writer->split_stream); + mz_stream_split_delete(&writer->split_stream); + } + + if (writer->buffered_stream != NULL) + mz_stream_buffered_delete(&writer->buffered_stream); + + if (writer->file_stream != NULL) + mz_stream_os_delete(&writer->file_stream); + + if (writer->mem_stream != NULL) { + mz_stream_mem_close(writer->mem_stream); + mz_stream_mem_delete(&writer->mem_stream); + } + + return err; +} + +/***************************************************************************/ + +int32_t mz_zip_writer_entry_open(void *handle, mz_zip_file *file_info) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t err = MZ_OK; + const char *password = NULL; + char password_buf[120]; + + /* Copy file info to access data upon close */ + memcpy(&writer->file_info, file_info, sizeof(mz_zip_file)); + + if (writer->entry_cb != NULL) + writer->entry_cb(handle, writer->entry_userdata, &writer->file_info); + + password = writer->password; + + /* Check if we need a password and ask for it if we need to */ + if ((writer->file_info.flag & MZ_ZIP_FLAG_ENCRYPTED) && (password == NULL) && + (writer->password_cb != NULL)) { + writer->password_cb(handle, writer->password_userdata, &writer->file_info, + password_buf, sizeof(password_buf)); + password = password_buf; + } + +#ifndef MZ_ZIP_NO_CRYPTO + if (mz_zip_attrib_is_dir(writer->file_info.external_fa, writer->file_info.version_madeby) != MZ_OK) { + /* Start calculating sha256 */ + mz_crypt_sha_create(&writer->sha256); + mz_crypt_sha_set_algorithm(writer->sha256, MZ_HASH_SHA256); + mz_crypt_sha_begin(writer->sha256); + } +#endif + + /* Open entry in zip */ + err = mz_zip_entry_write_open(writer->zip_handle, &writer->file_info, writer->compress_level, + writer->raw, password); + + return err; +} + +#if !defined(MZ_ZIP_NO_CRYPTO) && defined(MZ_ZIP_SIGNING) +int32_t mz_zip_writer_entry_sign(void *handle, uint8_t *message, int32_t message_size, + uint8_t *cert_data, int32_t cert_data_size, const char *cert_pwd) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t err = MZ_OK; + int32_t signature_size = 0; + uint8_t *signature = NULL; + + + if (writer == NULL || cert_data == NULL || cert_data_size <= 0) + return MZ_PARAM_ERROR; + if (mz_zip_entry_is_open(writer->zip_handle) != MZ_OK) + return MZ_PARAM_ERROR; + + /* Sign message with certificate */ + err = 
mz_crypt_sign(message, message_size, cert_data, cert_data_size, cert_pwd, + &signature, &signature_size); + + if ((err == MZ_OK) && (signature != NULL)) { + /* Write signature zip extra field */ + err = mz_zip_extrafield_write(writer->file_extra_stream, MZ_ZIP_EXTENSION_SIGN, + (uint16_t)signature_size); + + if (err == MZ_OK) { + if (mz_stream_write(writer->file_extra_stream, signature, signature_size) != signature_size) + err = MZ_WRITE_ERROR; + } + + MZ_FREE(signature); + } + + return err; +} +#endif + +int32_t mz_zip_writer_entry_close(void *handle) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t err = MZ_OK; +#ifndef MZ_ZIP_NO_CRYPTO + const uint8_t *extrafield = NULL; + int32_t extrafield_size = 0; + int16_t field_length_hash = 0; + uint8_t sha256[MZ_HASH_SHA256_SIZE]; + + + if (writer->sha256 != NULL) { + mz_crypt_sha_end(writer->sha256, sha256, sizeof(sha256)); + mz_crypt_sha_delete(&writer->sha256); + + /* Copy extrafield so we can append our own fields before close */ + mz_stream_mem_create(&writer->file_extra_stream); + mz_stream_mem_open(writer->file_extra_stream, NULL, MZ_OPEN_MODE_CREATE); + + /* Write sha256 hash to extrafield */ + field_length_hash = 4 + MZ_HASH_SHA256_SIZE; + err = mz_zip_extrafield_write(writer->file_extra_stream, MZ_ZIP_EXTENSION_HASH, field_length_hash); + if (err == MZ_OK) + err = mz_stream_write_uint16(writer->file_extra_stream, MZ_HASH_SHA256); + if (err == MZ_OK) + err = mz_stream_write_uint16(writer->file_extra_stream, MZ_HASH_SHA256_SIZE); + if (err == MZ_OK) { + if (mz_stream_write(writer->file_extra_stream, sha256, sizeof(sha256)) != MZ_HASH_SHA256_SIZE) + err = MZ_WRITE_ERROR; + } + +#ifdef MZ_ZIP_SIGNING + if ((err == MZ_OK) && (writer->cert_data != NULL) && (writer->cert_data_size > 0)) { + /* Sign entry if not zipping cd or if it is cd being zipped */ + if (!writer->zip_cd || strcmp(writer->file_info.filename, MZ_ZIP_CD_FILENAME) == 0) { + err = mz_zip_writer_entry_sign(handle, sha256, sizeof(sha256), + writer->cert_data, writer->cert_data_size, writer->cert_pwd); + } + } +#endif + + if ((writer->file_info.extrafield != NULL) && (writer->file_info.extrafield_size > 0)) + mz_stream_mem_write(writer->file_extra_stream, writer->file_info.extrafield, + writer->file_info.extrafield_size); + + /* Update extra field for central directory after adding extra fields */ + mz_stream_mem_get_buffer(writer->file_extra_stream, (const void **)&extrafield); + mz_stream_mem_get_buffer_length(writer->file_extra_stream, &extrafield_size); + + mz_zip_entry_set_extrafield(writer->zip_handle, extrafield, (uint16_t)extrafield_size); + } +#endif + + if (err == MZ_OK) { + if (writer->raw) + err = mz_zip_entry_close_raw(writer->zip_handle, writer->file_info.uncompressed_size, + writer->file_info.crc); + else + err = mz_zip_entry_close(writer->zip_handle); + } + + if (writer->file_extra_stream != NULL) + mz_stream_mem_delete(&writer->file_extra_stream); + + return err; +} + +int32_t mz_zip_writer_entry_write(void *handle, const void *buf, int32_t len) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t written = 0; + written = mz_zip_entry_write(writer->zip_handle, buf, len); +#ifndef MZ_ZIP_NO_CRYPTO + if ((written > 0) && (writer->sha256 != NULL)) + mz_crypt_sha_update(writer->sha256, buf, written); +#endif + return written; +} +/***************************************************************************/ + +int32_t mz_zip_writer_add_process(void *handle, void *stream, mz_stream_read_cb read_cb) { + mz_zip_writer *writer = (mz_zip_writer 
*)handle; + int32_t read = 0; + int32_t written = 0; + int32_t err = MZ_OK; + + if (mz_zip_writer_is_open(writer) != MZ_OK) + return MZ_PARAM_ERROR; + /* If the entry isn't open for writing, open it */ + if (mz_zip_entry_is_open(writer->zip_handle) != MZ_OK) + return MZ_PARAM_ERROR; + if (read_cb == NULL) + return MZ_PARAM_ERROR; + + read = read_cb(stream, writer->buffer, sizeof(writer->buffer)); + if (read == 0) + return MZ_END_OF_STREAM; + if (read < 0) { + err = read; + return err; + } + + written = mz_zip_writer_entry_write(handle, writer->buffer, read); + if (written != read) + return MZ_WRITE_ERROR; + + return written; +} + +int32_t mz_zip_writer_add(void *handle, void *stream, mz_stream_read_cb read_cb) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + uint64_t current_time = 0; + uint64_t update_time = 0; + int64_t current_pos = 0; + int64_t update_pos = 0; + int32_t err = MZ_OK; + int32_t written = 0; + + /* Update the progress at the beginning */ + if (writer->progress_cb != NULL) + writer->progress_cb(handle, writer->progress_userdata, &writer->file_info, current_pos); + + /* Write data to stream until done */ + while (err == MZ_OK) { + written = mz_zip_writer_add_process(handle, stream, read_cb); + if (written == MZ_END_OF_STREAM) + break; + if (written > 0) + current_pos += written; + if (written < 0) + err = written; + + /* Update progress if enough time have passed */ + current_time = mz_os_ms_time(); + if ((current_time - update_time) > writer->progress_cb_interval_ms) { + if (writer->progress_cb != NULL) + writer->progress_cb(handle, writer->progress_userdata, &writer->file_info, current_pos); + + update_pos = current_pos; + update_time = current_time; + } + } + + /* Update the progress at the end */ + if (writer->progress_cb != NULL && update_pos != current_pos) + writer->progress_cb(handle, writer->progress_userdata, &writer->file_info, current_pos); + + return err; +} + +int32_t mz_zip_writer_add_info(void *handle, void *stream, mz_stream_read_cb read_cb, mz_zip_file *file_info) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + int32_t err = MZ_OK; + + + if (mz_zip_writer_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + if (file_info == NULL) + return MZ_PARAM_ERROR; + + /* Add to zip */ + err = mz_zip_writer_entry_open(handle, file_info); + if (err != MZ_OK) + return err; + + if (stream != NULL) { + if (mz_zip_attrib_is_dir(writer->file_info.external_fa, writer->file_info.version_madeby) != MZ_OK) { + err = mz_zip_writer_add(handle, stream, read_cb); + if (err != MZ_OK) + return err; + } + } + + err = mz_zip_writer_entry_close(handle); + + return err; +} + +int32_t mz_zip_writer_add_buffer(void *handle, void *buf, int32_t len, mz_zip_file *file_info) { + void *mem_stream = NULL; + int32_t err = MZ_OK; + + if (mz_zip_writer_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + if (buf == NULL) + return MZ_PARAM_ERROR; + + /* Create a memory stream backed by our buffer and add from it */ + mz_stream_mem_create(&mem_stream); + mz_stream_mem_set_buffer(mem_stream, buf, len); + + err = mz_stream_mem_open(mem_stream, NULL, MZ_OPEN_MODE_READ); + if (err == MZ_OK) + err = mz_zip_writer_add_info(handle, mem_stream, mz_stream_mem_read, file_info); + + mz_stream_mem_delete(&mem_stream); + return err; +} + +int32_t mz_zip_writer_add_file(void *handle, const char *path, const char *filename_in_zip) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + mz_zip_file file_info; + uint32_t target_attrib = 0; + uint32_t src_attrib = 0; + int32_t err = MZ_OK; + uint8_t 
src_sys = 0; + void *stream = NULL; + char link_path[1024]; + const char *filename = filename_in_zip; + + + if (mz_zip_writer_is_open(handle) != MZ_OK) + return MZ_PARAM_ERROR; + if (path == NULL) + return MZ_PARAM_ERROR; + + if (filename == NULL) { + err = mz_path_get_filename(path, &filename); + if (err != MZ_OK) + return err; + } + + memset(&file_info, 0, sizeof(file_info)); + + /* The path name saved, should not include a leading slash. */ + /* If it did, windows/xp and dynazip couldn't read the zip file. */ + + while (filename[0] == '\\' || filename[0] == '/') + filename += 1; + + /* Get information about the file on disk so we can store it in zip */ + + file_info.version_madeby = MZ_VERSION_MADEBY; + file_info.compression_method = writer->compress_method; + file_info.filename = filename; + file_info.uncompressed_size = mz_os_get_file_size(path); + file_info.flag = MZ_ZIP_FLAG_UTF8; + + if (writer->zip_cd) + file_info.flag |= MZ_ZIP_FLAG_MASK_LOCAL_INFO; + if (writer->aes) + file_info.aes_version = MZ_AES_VERSION; + + mz_os_get_file_date(path, &file_info.modified_date, &file_info.accessed_date, + &file_info.creation_date); + mz_os_get_file_attribs(path, &src_attrib); + + src_sys = MZ_HOST_SYSTEM(file_info.version_madeby); + + if ((src_sys != MZ_HOST_SYSTEM_MSDOS) && (src_sys != MZ_HOST_SYSTEM_WINDOWS_NTFS)) { + /* High bytes are OS specific attributes, low byte is always DOS attributes */ + if (mz_zip_attrib_convert(src_sys, src_attrib, MZ_HOST_SYSTEM_MSDOS, &target_attrib) == MZ_OK) + file_info.external_fa = target_attrib; + file_info.external_fa |= (src_attrib << 16); + } else { + file_info.external_fa = src_attrib; + } + + if (writer->store_links && mz_os_is_symlink(path) == MZ_OK) { + err = mz_os_read_symlink(path, link_path, sizeof(link_path)); + if (err == MZ_OK) + file_info.linkname = link_path; + } else if (mz_os_is_dir(path) != MZ_OK) { + mz_stream_os_create(&stream); + err = mz_stream_os_open(stream, path, MZ_OPEN_MODE_READ); + } + + if (err == MZ_OK) + err = mz_zip_writer_add_info(handle, stream, mz_stream_read, &file_info); + + if (stream != NULL) { + mz_stream_close(stream); + mz_stream_delete(&stream); + } + + return err; +} + +int32_t mz_zip_writer_add_path(void *handle, const char *path, const char *root_path, + uint8_t include_path, uint8_t recursive) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + DIR *dir = NULL; + struct dirent *entry = NULL; + int32_t err = MZ_OK; + int16_t is_dir = 0; + const char *filename = NULL; + const char *filenameinzip = path; + char *wildcard_ptr = NULL; + char full_path[1024]; + char path_dir[1024]; + + + if (strrchr(path, '*') != NULL) { + strncpy(path_dir, path, sizeof(path_dir) - 1); + path_dir[sizeof(path_dir) - 1] = 0; + mz_path_remove_filename(path_dir); + wildcard_ptr = path_dir + strlen(path_dir) + 1; + root_path = path = path_dir; + } else { + if (mz_os_is_dir(path) == MZ_OK) + is_dir = 1; + + /* Construct the filename that our file will be stored in the zip as */ + if (root_path == NULL) + root_path = path; + + /* Should the file be stored with any path info at all? 
*/ + if (!include_path) { + if (!is_dir && root_path == path) { + if (mz_path_get_filename(filenameinzip, &filename) == MZ_OK) + filenameinzip = filename; + } else { + filenameinzip += strlen(root_path); + } + } + + if (!writer->store_links && !writer->follow_links) { + if (mz_os_is_symlink(path) == MZ_OK) + return err; + } + + if (*filenameinzip != 0) + err = mz_zip_writer_add_file(handle, path, filenameinzip); + + if (!is_dir) + return err; + + if (writer->store_links) { + if (mz_os_is_symlink(path) == MZ_OK) + return err; + } + } + + dir = mz_os_open_dir(path); + + if (dir == NULL) + return MZ_EXIST_ERROR; + + while ((entry = mz_os_read_dir(dir)) != NULL) { + if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) + continue; + + full_path[0] = 0; + mz_path_combine(full_path, path, sizeof(full_path)); + mz_path_combine(full_path, entry->d_name, sizeof(full_path)); + + if (!recursive && mz_os_is_dir(full_path) == MZ_OK) + continue; + + if ((wildcard_ptr != NULL) && (mz_path_compare_wc(entry->d_name, wildcard_ptr, 1) != MZ_OK)) + continue; + + err = mz_zip_writer_add_path(handle, full_path, root_path, include_path, recursive); + if (err != MZ_OK) + break; + } + + mz_os_close_dir(dir); + return err; +} + +int32_t mz_zip_writer_copy_from_reader(void *handle, void *reader) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + mz_zip_file *file_info = NULL; + int64_t compressed_size = 0; + int64_t uncompressed_size = 0; + uint32_t crc32 = 0; + int32_t err = MZ_OK; + uint8_t original_raw = 0; + void *reader_zip_handle = NULL; + void *writer_zip_handle = NULL; + + + if (mz_zip_reader_is_open(reader) != MZ_OK) + return MZ_PARAM_ERROR; + if (mz_zip_writer_is_open(writer) != MZ_OK) + return MZ_PARAM_ERROR; + + err = mz_zip_reader_entry_get_info(reader, &file_info); + + if (err != MZ_OK) + return err; + + mz_zip_reader_get_zip_handle(reader, &reader_zip_handle); + mz_zip_writer_get_zip_handle(writer, &writer_zip_handle); + + /* Open entry for raw reading */ + err = mz_zip_entry_read_open(reader_zip_handle, 1, NULL); + + if (err == MZ_OK) { + /* Write entry raw, save original raw value */ + original_raw = writer->raw; + writer->raw = 1; + + err = mz_zip_writer_entry_open(writer, file_info); + + if ((err == MZ_OK) && + (mz_zip_attrib_is_dir(writer->file_info.external_fa, writer->file_info.version_madeby) != MZ_OK)) { + err = mz_zip_writer_add(writer, reader_zip_handle, mz_zip_entry_read); + } + + if (err == MZ_OK) { + err = mz_zip_entry_read_close(reader_zip_handle, &crc32, &compressed_size, &uncompressed_size); + if (err == MZ_OK) + err = mz_zip_entry_write_close(writer_zip_handle, crc32, compressed_size, uncompressed_size); + } + + if (mz_zip_entry_is_open(reader_zip_handle) == MZ_OK) + mz_zip_entry_close(reader_zip_handle); + + if (mz_zip_entry_is_open(writer_zip_handle) == MZ_OK) + mz_zip_entry_close(writer_zip_handle); + + writer->raw = original_raw; + } + + return err; +} + +/***************************************************************************/ + +void mz_zip_writer_set_password(void *handle, const char *password) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->password = password; +} + +void mz_zip_writer_set_comment(void *handle, const char *comment) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->comment = comment; +} + +void mz_zip_writer_set_raw(void *handle, uint8_t raw) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->raw = raw; +} + +int32_t mz_zip_writer_get_raw(void *handle, uint8_t *raw) { + mz_zip_writer *writer = 
(mz_zip_writer *)handle; + if (raw == NULL) + return MZ_PARAM_ERROR; + *raw = writer->raw; + return MZ_OK; +} + +void mz_zip_writer_set_aes(void *handle, uint8_t aes) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->aes = aes; +} + +void mz_zip_writer_set_compress_method(void *handle, uint16_t compress_method) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->compress_method = compress_method; +} + +void mz_zip_writer_set_compress_level(void *handle, int16_t compress_level) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->compress_level = compress_level; +} + +void mz_zip_writer_set_follow_links(void *handle, uint8_t follow_links) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->follow_links = follow_links; +} + +void mz_zip_writer_set_store_links(void *handle, uint8_t store_links) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->store_links = store_links; +} + +void mz_zip_writer_set_zip_cd(void *handle, uint8_t zip_cd) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->zip_cd = zip_cd; +} + +int32_t mz_zip_writer_set_certificate(void *handle, const char *cert_path, const char *cert_pwd) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + void *cert_stream = NULL; + uint8_t *cert_data = NULL; + int32_t cert_data_size = 0; + int32_t err = MZ_OK; + + if (cert_path == NULL) + return MZ_PARAM_ERROR; + + cert_data_size = (int32_t)mz_os_get_file_size(cert_path); + + if (cert_data_size == 0) + return MZ_PARAM_ERROR; + + if (writer->cert_data != NULL) { + MZ_FREE(writer->cert_data); + writer->cert_data = NULL; + } + + cert_data = (uint8_t *)MZ_ALLOC(cert_data_size); + + /* Read pkcs12 certificate from disk */ + mz_stream_os_create(&cert_stream); + err = mz_stream_os_open(cert_stream, cert_path, MZ_OPEN_MODE_READ); + if (err == MZ_OK) { + if (mz_stream_os_read(cert_stream, cert_data, cert_data_size) != cert_data_size) + err = MZ_READ_ERROR; + mz_stream_os_close(cert_stream); + } + mz_stream_os_delete(&cert_stream); + + if (err == MZ_OK) { + writer->cert_data = cert_data; + writer->cert_data_size = cert_data_size; + writer->cert_pwd = cert_pwd; + } else { + MZ_FREE(cert_data); + } + + return err; +} + +void mz_zip_writer_set_overwrite_cb(void *handle, void *userdata, mz_zip_writer_overwrite_cb cb) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->overwrite_cb = cb; + writer->overwrite_userdata = userdata; +} + +void mz_zip_writer_set_password_cb(void *handle, void *userdata, mz_zip_writer_password_cb cb) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->password_cb = cb; + writer->password_userdata = userdata; +} + +void mz_zip_writer_set_progress_cb(void *handle, void *userdata, mz_zip_writer_progress_cb cb) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->progress_cb = cb; + writer->progress_userdata = userdata; +} + +void mz_zip_writer_set_progress_interval(void *handle, uint32_t milliseconds) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->progress_cb_interval_ms = milliseconds; +} + +void mz_zip_writer_set_entry_cb(void *handle, void *userdata, mz_zip_writer_entry_cb cb) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + writer->entry_cb = cb; + writer->entry_userdata = userdata; +} + +int32_t mz_zip_writer_get_zip_handle(void *handle, void **zip_handle) { + mz_zip_writer *writer = (mz_zip_writer *)handle; + if (zip_handle == NULL) + return MZ_PARAM_ERROR; + *zip_handle = writer->zip_handle; + if (*zip_handle == NULL) + return MZ_EXIST_ERROR; + return 
MZ_OK; +} + +/***************************************************************************/ + +void *mz_zip_writer_create(void **handle) { + mz_zip_writer *writer = NULL; + + writer = (mz_zip_writer *)MZ_ALLOC(sizeof(mz_zip_writer)); + if (writer != NULL) { + memset(writer, 0, sizeof(mz_zip_writer)); +#if defined(HAVE_WZAES) + writer->aes = 1; +#endif +#if defined(HAVE_ZLIB) || defined(HAVE_LIBCOMP) + writer->compress_method = MZ_COMPRESS_METHOD_DEFLATE; +#elif defined(HAVE_BZIP2) + writer->compress_method = MZ_COMPRESS_METHOD_BZIP2; +#elif defined(HAVE_LZMA) + writer->compress_method = MZ_COMPRESS_METHOD_LZMA; +#else + writer->compress_method = MZ_COMPRESS_METHOD_STORE; +#endif + writer->compress_level = MZ_COMPRESS_LEVEL_BEST; + writer->progress_cb_interval_ms = MZ_DEFAULT_PROGRESS_INTERVAL; + } + if (handle != NULL) + *handle = writer; + + return writer; +} + +void mz_zip_writer_delete(void **handle) { + mz_zip_writer *writer = NULL; + if (handle == NULL) + return; + writer = (mz_zip_writer *)*handle; + if (writer != NULL) { + mz_zip_writer_close(writer); + + if (writer->cert_data != NULL) + MZ_FREE(writer->cert_data); + + writer->cert_data = NULL; + writer->cert_data_size = 0; + + MZ_FREE(writer); + } + *handle = NULL; +} + +/***************************************************************************/ diff --git a/Externals/minizip/mz_zip_rw.h b/Externals/minizip/mz_zip_rw.h new file mode 100644 index 000000000000..e507f94a5a5f --- /dev/null +++ b/Externals/minizip/mz_zip_rw.h @@ -0,0 +1,285 @@ +/* mz_zip_rw.h -- Zip reader/writer + part of the minizip-ng project + + Copyright (C) 2010-2021 Nathan Moinvaziri + https://github.com/zlib-ng/minizip-ng + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_ZIP_RW_H +#define MZ_ZIP_RW_H + +#ifdef __cplusplus +extern "C" { +#endif + +/***************************************************************************/ + +typedef int32_t (*mz_zip_reader_overwrite_cb)(void *handle, void *userdata, mz_zip_file *file_info, const char *path); +typedef int32_t (*mz_zip_reader_password_cb)(void *handle, void *userdata, mz_zip_file *file_info, char *password, int32_t max_password); +typedef int32_t (*mz_zip_reader_progress_cb)(void *handle, void *userdata, mz_zip_file *file_info, int64_t position); +typedef int32_t (*mz_zip_reader_entry_cb)(void *handle, void *userdata, mz_zip_file *file_info, const char *path); + +/***************************************************************************/ + +int32_t mz_zip_reader_is_open(void *handle); +/* Checks to see if the zip file is open */ + +int32_t mz_zip_reader_open(void *handle, void *stream); +/* Opens zip file from stream */ + +int32_t mz_zip_reader_open_file(void *handle, const char *path); +/* Opens zip file from a file path */ + +int32_t mz_zip_reader_open_file_in_memory(void *handle, const char *path); +/* Opens zip file from a file path into memory for faster access */ + +int32_t mz_zip_reader_open_buffer(void *handle, uint8_t *buf, int32_t len, uint8_t copy); +/* Opens zip file from memory buffer */ + +int32_t mz_zip_reader_close(void *handle); +/* Closes the zip file */ + +/***************************************************************************/ + +int32_t mz_zip_reader_unzip_cd(void *handle); +/* Unzip the central directory */ + +/***************************************************************************/ + +int32_t mz_zip_reader_goto_first_entry(void *handle); +/* Goto the first entry in the zip file that matches the pattern */ + +int32_t mz_zip_reader_goto_next_entry(void *handle); +/* Goto the next entry in the zip file that matches the pattern */ + +int32_t mz_zip_reader_locate_entry(void *handle, const char *filename, uint8_t ignore_case); +/* Locates an entry by filename */ + +int32_t mz_zip_reader_entry_open(void *handle); +/* Opens an entry for reading */ + +int32_t mz_zip_reader_entry_close(void *handle); +/* Closes an entry */ + +int32_t mz_zip_reader_entry_read(void *handle, void *buf, int32_t len); +/* Reads an entry after being opened */ + +int32_t mz_zip_reader_entry_has_sign(void *handle); +/* Checks to see if the entry has a signature */ + +int32_t mz_zip_reader_entry_sign_verify(void *handle); +/* Verifies a signature stored with the entry */ + +int32_t mz_zip_reader_entry_get_hash(void *handle, uint16_t algorithm, uint8_t *digest, int32_t digest_size); +/* Gets a hash algorithm from the entry's extra field */ + +int32_t mz_zip_reader_entry_get_first_hash(void *handle, uint16_t *algorithm, uint16_t *digest_size); +/* Gets the most secure hash algorithm from the entry's extra field */ + +int32_t mz_zip_reader_entry_get_info(void *handle, mz_zip_file **file_info); +/* Gets the current entry file info */ + +int32_t mz_zip_reader_entry_is_dir(void *handle); +/* Checks to see if the current entry is a directory */ + +int32_t mz_zip_reader_entry_save(void *handle, void *stream, mz_stream_write_cb write_cb); +/* Save the current entry to a stream */ + +int32_t mz_zip_reader_entry_save_process(void *handle, void *stream, mz_stream_write_cb write_cb); +/* Saves a portion of the current entry to a stream callback */ + +int32_t mz_zip_reader_entry_save_file(void *handle, const char *path); +/* Save the current entry to a file */ + +int32_t mz_zip_reader_entry_save_buffer(void *handle, 
void *buf, int32_t len); +/* Save the current entry to a memory buffer */ + +int32_t mz_zip_reader_entry_save_buffer_length(void *handle); +/* Gets the length of the buffer required to save */ + +/***************************************************************************/ + +int32_t mz_zip_reader_save_all(void *handle, const char *destination_dir); +/* Save all files into a directory */ + +/***************************************************************************/ + +void mz_zip_reader_set_pattern(void *handle, const char *pattern, uint8_t ignore_case); +/* Sets the match pattern for entries in the zip file, if null all entries are matched */ + +void mz_zip_reader_set_password(void *handle, const char *password); +/* Sets the password required for extraction */ + +void mz_zip_reader_set_raw(void *handle, uint8_t raw); +/* Sets whether or not it should save the entry raw */ + +int32_t mz_zip_reader_get_raw(void *handle, uint8_t *raw); +/* Gets whether or not it should save the entry raw */ + +int32_t mz_zip_reader_get_zip_cd(void *handle, uint8_t *zip_cd); +/* Gets whether or not the archive has a zipped central directory */ + +int32_t mz_zip_reader_get_comment(void *handle, const char **comment); +/* Gets the comment for the central directory */ + +int32_t mz_zip_reader_set_recover(void *handle, uint8_t recover); +/* Sets the ability to recover the central dir by reading local file headers */ + +void mz_zip_reader_set_encoding(void *handle, int32_t encoding); +/* Sets whether or not it should support a special character encoding in zip file names. */ + +void mz_zip_reader_set_sign_required(void *handle, uint8_t sign_required); +/* Sets whether or not a signature is required */ + +void mz_zip_reader_set_overwrite_cb(void *handle, void *userdata, mz_zip_reader_overwrite_cb cb); +/* Callback for what to do when a file is being overwritten */ + +void mz_zip_reader_set_password_cb(void *handle, void *userdata, mz_zip_reader_password_cb cb); +/* Callback for when a password is required and hasn't been set */ + +void mz_zip_reader_set_progress_cb(void *handle, void *userdata, mz_zip_reader_progress_cb cb); +/* Callback for extraction progress */ + +void mz_zip_reader_set_progress_interval(void *handle, uint32_t milliseconds); +/* Let at least milliseconds pass between calls to progress callback */ + +void mz_zip_reader_set_entry_cb(void *handle, void *userdata, mz_zip_reader_entry_cb cb); +/* Callback for zip file entries */ + +int32_t mz_zip_reader_get_zip_handle(void *handle, void **zip_handle); +/* Gets the underlying zip instance handle */ + +void* mz_zip_reader_create(void **handle); +/* Create new instance of zip reader */ + +void mz_zip_reader_delete(void **handle); +/* Delete instance of zip reader */ + +/***************************************************************************/ + +typedef int32_t (*mz_zip_writer_overwrite_cb)(void *handle, void *userdata, const char *path); +typedef int32_t (*mz_zip_writer_password_cb)(void *handle, void *userdata, mz_zip_file *file_info, char *password, int32_t max_password); +typedef int32_t (*mz_zip_writer_progress_cb)(void *handle, void *userdata, mz_zip_file *file_info, int64_t position); +typedef int32_t (*mz_zip_writer_entry_cb)(void *handle, void *userdata, mz_zip_file *file_info); + +/***************************************************************************/ + +int32_t mz_zip_writer_is_open(void *handle); +/* Checks to see if the zip file is open */ + +int32_t mz_zip_writer_open(void *handle, void *stream, uint8_t append); +/* Opens 
zip file from stream */ + +int32_t mz_zip_writer_open_file(void *handle, const char *path, int64_t disk_size, uint8_t append); +/* Opens zip file from a file path */ + +int32_t mz_zip_writer_open_file_in_memory(void *handle, const char *path); +/* Opens zip file from a file path into memory for faster access */ + +int32_t mz_zip_writer_close(void *handle); +/* Closes the zip file */ + +/***************************************************************************/ + +int32_t mz_zip_writer_entry_open(void *handle, mz_zip_file *file_info); +/* Opens an entry in the zip file for writing */ + +int32_t mz_zip_writer_entry_close(void *handle); +/* Closes entry in zip file */ + +int32_t mz_zip_writer_entry_write(void *handle, const void *buf, int32_t len); +/* Writes data into entry for zip */ + +/***************************************************************************/ + +int32_t mz_zip_writer_add(void *handle, void *stream, mz_stream_read_cb read_cb); +/* Writes all data to the currently open entry in the zip */ + +int32_t mz_zip_writer_add_process(void *handle, void *stream, mz_stream_read_cb read_cb); +/* Writes a portion of data to the currently open entry in the zip */ + +int32_t mz_zip_writer_add_info(void *handle, void *stream, mz_stream_read_cb read_cb, mz_zip_file *file_info); +/* Adds an entry to the zip based on the info */ + +int32_t mz_zip_writer_add_buffer(void *handle, void *buf, int32_t len, mz_zip_file *file_info); +/* Adds an entry to the zip with a memory buffer */ + +int32_t mz_zip_writer_add_file(void *handle, const char *path, const char *filename_in_zip); +/* Adds an entry to the zip from a file */ + +int32_t mz_zip_writer_add_path(void *handle, const char *path, const char *root_path, uint8_t include_path, + uint8_t recursive); +/* Enumerates a directory or pattern and adds entries to the zip */ + +int32_t mz_zip_writer_copy_from_reader(void *handle, void *reader); +/* Adds an entry from a zip reader instance */ + +/***************************************************************************/ + +void mz_zip_writer_set_password(void *handle, const char *password); +/* Password to use for encrypting files in the zip */ + +void mz_zip_writer_set_comment(void *handle, const char *comment); +/* Comment to use for the archive */ + +void mz_zip_writer_set_raw(void *handle, uint8_t raw); +/* Sets whether or not we should write the entry raw */ + +int32_t mz_zip_writer_get_raw(void *handle, uint8_t *raw); +/* Gets whether or not we should write the entry raw */ + +void mz_zip_writer_set_aes(void *handle, uint8_t aes); +/* Use aes encryption when adding files in zip */ + +void mz_zip_writer_set_compress_method(void *handle, uint16_t compress_method); +/* Sets the compression method when adding files in zip */ + +void mz_zip_writer_set_compress_level(void *handle, int16_t compress_level); +/* Sets the compression level when adding files in zip */ + +void mz_zip_writer_set_follow_links(void *handle, uint8_t follow_links); +/* Follow symbolic links when traversing directories and files to add */ + +void mz_zip_writer_set_store_links(void *handle, uint8_t store_links); +/* Store symbolic links in zip file */ + +void mz_zip_writer_set_zip_cd(void *handle, uint8_t zip_cd); +/* Sets whether or not central directory should be zipped */ + +int32_t mz_zip_writer_set_certificate(void *handle, const char *cert_path, const char *cert_pwd); +/* Sets the certificate and timestamp url to use for signing when adding files in zip */ + +void mz_zip_writer_set_overwrite_cb(void *handle, void 
*userdata, mz_zip_writer_overwrite_cb cb); +/* Callback for what to do when zip file already exists */ + +void mz_zip_writer_set_password_cb(void *handle, void *userdata, mz_zip_writer_password_cb cb); +/* Callback to ask if a password is required for adding */ + +void mz_zip_writer_set_progress_cb(void *handle, void *userdata, mz_zip_writer_progress_cb cb); +/* Callback for compression progress */ + +void mz_zip_writer_set_progress_interval(void *handle, uint32_t milliseconds); +/* Let at least milliseconds pass between calls to progress callback */ + +void mz_zip_writer_set_entry_cb(void *handle, void *userdata, mz_zip_writer_entry_cb cb); +/* Callback for zip file entries */ + +int32_t mz_zip_writer_get_zip_handle(void *handle, void **zip_handle); +/* Gets the underlying zip handle */ + +void* mz_zip_writer_create(void **handle); +/* Create new instance of zip writer */ + +void mz_zip_writer_delete(void **handle); +/* Delete instance of zip writer */ + +/***************************************************************************/ + +#ifdef __cplusplus +} +#endif + +#endif
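For reference, a minimal usage sketch of the reader/writer API declared above (illustrative only, not part of the patch; the archive name, the output directory, and the companion includes mz.h, mz_strm.h and mz_zip.h are assumptions about the surrounding minizip-ng headers):

#include <stdint.h>
#include "mz.h"        /* assumed to provide MZ_OK and the other MZ_* codes */
#include "mz_strm.h"   /* assumed to provide the stream callback types */
#include "mz_zip.h"    /* assumed to provide mz_zip_file */
#include "mz_zip_rw.h"

/* Zip one file into example.zip, then extract the archive again. */
static int32_t roundtrip(void) {
    void *writer = NULL;
    void *reader = NULL;
    int32_t err = MZ_OK;

    mz_zip_writer_create(&writer);
    err = mz_zip_writer_open_file(writer, "example.zip", 0 /* disk_size */, 0 /* append */);
    if (err == MZ_OK) {
        /* NULL filename_in_zip: the entry name is derived from the path */
        err = mz_zip_writer_add_file(writer, "readme.txt", NULL);
        mz_zip_writer_close(writer);
    }
    mz_zip_writer_delete(&writer);

    if (err == MZ_OK) {
        mz_zip_reader_create(&reader);
        err = mz_zip_reader_open_file(reader, "example.zip");
        if (err == MZ_OK) {
            err = mz_zip_reader_save_all(reader, "extracted" /* destination dir */);
            mz_zip_reader_close(reader);
        }
        mz_zip_reader_delete(&reader);
    }

    return err;
}

The setters declared above (mz_zip_writer_set_compress_method, mz_zip_writer_set_password, and so on) would typically be called between mz_zip_writer_create and mz_zip_writer_open_file.

diff --git a/Externals/minizip/unzip.c deleted file mode 100644 index 145fe5fbf8d0..000000000000 --- a/Externals/minizip/unzip.c +++ /dev/null @@ -1,1991 +0,0 @@ -/* unzip.c -- IO for uncompress .zip files using zlib - Version 1.1, February 14h, 2010 - part of the MiniZip project - - Copyright (C) 1998-2010 Gilles Vollant - http://www.winimage.com/zLibDll/minizip.html - Modifications of Unzip for Zip64 - Copyright (C) 2007-2008 Even Rouault - Modifications for Zip64 support on both zip and unzip - Copyright (C) 2009-2010 Mathias Svensson - http://result42.com - Modifications for AES, PKWARE disk spanning - Copyright (C) 2010-2014 Nathan Moinvaziri - - This program is distributed under the terms of the same license as zlib. - See the accompanying LICENSE file for the full text of the license. 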
-*/ - -#include -#include -#include -#include -#include - -#include "zlib.h" -#include "unzip.h" - -#ifdef HAVE_AES -# define AES_METHOD (99) -# define AES_PWVERIFYSIZE (2) -# define AES_MAXSALTLENGTH (16) -# define AES_AUTHCODESIZE (10) -# define AES_HEADERSIZE (11) -# define AES_KEYSIZE(mode) (64 + (mode * 64)) - -# include "aes/aes.h" -# include "aes/fileenc.h" -#endif -#ifdef HAVE_APPLE_COMPRESSION -# include -#endif - -#ifndef NOUNCRYPT -# include "crypt.h" -#endif - -#define DISKHEADERMAGIC (0x08074b50) -#define LOCALHEADERMAGIC (0x04034b50) -#define CENTRALHEADERMAGIC (0x02014b50) -#define ENDHEADERMAGIC (0x06054b50) -#define ZIP64ENDHEADERMAGIC (0x06064b50) -#define ZIP64ENDLOCHEADERMAGIC (0x07064b50) - -#define SIZECENTRALDIRITEM (0x2e) -#define SIZECENTRALHEADERLOCATOR (0x14) -#define SIZEZIPLOCALHEADER (0x1e) - -#ifndef BUFREADCOMMENT -# define BUFREADCOMMENT (0x400) -#endif - -#ifndef UNZ_BUFSIZE -# define UNZ_BUFSIZE (UINT16_MAX) -#endif -#ifndef UNZ_MAXFILENAMEINZIP -# define UNZ_MAXFILENAMEINZIP (256) -#endif - -#ifndef ALLOC -# define ALLOC(size) (malloc(size)) -#endif -#ifndef TRYFREE -# define TRYFREE(p) {if (p) free(p);} -#endif - -const char unz_copyright[] = - " unzip 1.01 Copyright 1998-2004 Gilles Vollant - http://www.winimage.com/zLibDll"; - -/* unz_file_info_internal contain internal info about a file in zipfile*/ -typedef struct unz_file_info64_internal_s -{ - uint64_t offset_curfile; /* relative offset of local header 8 bytes */ - uint64_t byte_before_the_zipfile; /* byte before the zipfile, (>0 for sfx) */ -#ifdef HAVE_AES - uint8_t aes_encryption_mode; - uint16_t aes_compression_method; - uint16_t aes_version; -#endif -} unz_file_info64_internal; - -/* file_in_zip_read_info_s contain internal information about a file in zipfile */ -typedef struct -{ - uint8_t *read_buffer; /* internal buffer for compressed data */ - z_stream stream; /* zLib stream structure for inflate */ -#ifdef HAVE_BZIP2 - bz_stream bstream; /* bzLib stream structure for bziped */ -#endif -#ifdef HAVE_APPLE_COMPRESSION - compression_stream astream; /* libcompression stream structure */ -#endif -#ifdef HAVE_AES - fcrypt_ctx aes_ctx; -#endif - uint64_t pos_in_zipfile; /* position in byte on the zipfile, for fseek */ - uint8_t stream_initialised; /* flag set if stream structure is initialised */ - - uint64_t offset_local_extrafield; /* offset of the local extra field */ - uint16_t size_local_extrafield; /* size of the local extra field */ - uint64_t pos_local_extrafield; /* position in the local extra field in read */ - uint64_t total_out_64; - - uint32_t crc32; /* crc32 of all data uncompressed */ - uint32_t crc32_wait; /* crc32 we must obtain after decompress all */ - uint64_t rest_read_compressed; /* number of byte to be decompressed */ - uint64_t rest_read_uncompressed; /* number of byte to be obtained after decomp */ - - zlib_filefunc64_32_def z_filefunc; - - voidpf filestream; /* io structore of the zipfile */ - uint16_t compression_method; /* compression method (0==store) */ - uint64_t byte_before_the_zipfile; /* byte before the zipfile, (>0 for sfx) */ - int raw; -} file_in_zip64_read_info_s; - -/* unz64_s contain internal information about the zipfile */ -typedef struct -{ - zlib_filefunc64_32_def z_filefunc; - - voidpf filestream; /* io structure of the current zipfile */ - voidpf filestream_with_CD; /* io structure of the disk with the central directory */ - - unz_global_info64 gi; /* public global information */ - - uint64_t byte_before_the_zipfile; /* byte before the zipfile, (>0 
for sfx) */ - uint64_t num_file; /* number of the current file in the zipfile */ - uint64_t pos_in_central_dir; /* pos of the current file in the central dir */ - uint64_t current_file_ok; /* flag about the usability of the current file */ - uint64_t central_pos; /* position of the beginning of the central dir */ - uint32_t number_disk; /* number of the current disk, used for spanning ZIP */ - uint64_t size_central_dir; /* size of the central directory */ - uint64_t offset_central_dir; /* offset of start of central directory with - respect to the starting disk number */ - - unz_file_info64 cur_file_info; /* public info about the current file in zip*/ - unz_file_info64_internal cur_file_info_internal; - /* private info about it*/ - file_in_zip64_read_info_s *pfile_in_zip_read; - /* structure about the current file if we are decompressing it */ - int is_zip64; /* is the current file zip64 */ -#ifndef NOUNCRYPT - uint32_t keys[3]; /* keys defining the pseudo-random sequence */ - const z_crc_t *pcrc_32_tab; -#endif -} unz64_s; - -/* Read a byte from a gz_stream; Return EOF for end of file. */ -static int unzReadUInt8(const zlib_filefunc64_32_def *pzlib_filefunc_def, voidpf filestream, uint8_t *value) -{ - uint8_t c = 0; - if (ZREAD64(*pzlib_filefunc_def, filestream, &c, 1) == 1) - { - *value = (uint8_t)c; - return UNZ_OK; - } - *value = 0; - if (ZERROR64(*pzlib_filefunc_def, filestream)) - return UNZ_ERRNO; - return UNZ_EOF; -} - -static int unzReadUInt16(const zlib_filefunc64_32_def *pzlib_filefunc_def, voidpf filestream, uint16_t *value) -{ - uint16_t x; - uint8_t c = 0; - int err = UNZ_OK; - - err = unzReadUInt8(pzlib_filefunc_def, filestream, &c); - x = (uint16_t)c; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &c); - x |= ((uint16_t)c) << 8; - - if (err == UNZ_OK) - *value = x; - else - *value = 0; - return err; -} - -static int unzReadUInt32(const zlib_filefunc64_32_def *pzlib_filefunc_def, voidpf filestream, uint32_t *value) -{ - uint32_t x = 0; - uint8_t c = 0; - int err = UNZ_OK; - - err = unzReadUInt8(pzlib_filefunc_def, filestream, &c); - x = (uint32_t)c; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &c); - x |= ((uint32_t)c) << 8; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &c); - x |= ((uint32_t)c) << 16; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &c); - x += ((uint32_t)c) << 24; - - if (err == UNZ_OK) - *value = x; - else - *value = 0; - return err; -} - -static int unzReadUInt64(const zlib_filefunc64_32_def *pzlib_filefunc_def, voidpf filestream, uint64_t *value) -{ - uint64_t x = 0; - uint8_t i = 0; - int err = UNZ_OK; - - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x = (uint64_t)i; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 8; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 16; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 24; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 32; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 40; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 48; - if (err == UNZ_OK) - err = unzReadUInt8(pzlib_filefunc_def, filestream, &i); - x |= ((uint64_t)i) << 56; - - if (err == UNZ_OK) - *value = 
x; - else - *value = 0; - return err; -} - -/* Locate the Central directory of a zip file (at the end, just before the global comment) */ -static uint64_t unzSearchCentralDir(const zlib_filefunc64_32_def *pzlib_filefunc_def, voidpf filestream) -{ - uint8_t buf[BUFREADCOMMENT + 4]; - uint64_t file_size = 0; - uint64_t back_read = 4; - uint64_t max_back = UINT16_MAX; /* maximum size of global comment */ - uint64_t pos_found = 0; - uint32_t read_size = 0; - uint64_t read_pos = 0; - uint32_t i = 0; - - if (ZSEEK64(*pzlib_filefunc_def, filestream, 0, ZLIB_FILEFUNC_SEEK_END) != 0) - return 0; - - file_size = ZTELL64(*pzlib_filefunc_def, filestream); - - if (max_back > file_size) - max_back = file_size; - - while (back_read < max_back) - { - if (back_read + BUFREADCOMMENT > max_back) - back_read = max_back; - else - back_read += BUFREADCOMMENT; - - read_pos = file_size - back_read; - read_size = ((BUFREADCOMMENT + 4) < (file_size - read_pos)) ? - (BUFREADCOMMENT + 4) : (uint32_t)(file_size - read_pos); - - if (ZSEEK64(*pzlib_filefunc_def, filestream, read_pos, ZLIB_FILEFUNC_SEEK_SET) != 0) - break; - if (ZREAD64(*pzlib_filefunc_def, filestream, buf, read_size) != read_size) - break; - - for (i = read_size - 3; (i--) > 0;) - if (((*(buf+i)) == (ENDHEADERMAGIC & 0xff)) && - ((*(buf+i+1)) == (ENDHEADERMAGIC >> 8 & 0xff)) && - ((*(buf+i+2)) == (ENDHEADERMAGIC >> 16 & 0xff)) && - ((*(buf+i+3)) == (ENDHEADERMAGIC >> 24 & 0xff))) - { - pos_found = read_pos+i; - break; - } - - if (pos_found != 0) - break; - } - - return pos_found; -} - -/* Locate the Central directory 64 of a zipfile (at the end, just before the global comment) */ -static uint64_t unzSearchCentralDir64(const zlib_filefunc64_32_def *pzlib_filefunc_def, voidpf filestream, - const uint64_t endcentraloffset) -{ - uint64_t offset = 0; - uint32_t value32 = 0; - - /* Zip64 end of central directory locator */ - if (ZSEEK64(*pzlib_filefunc_def, filestream, endcentraloffset - SIZECENTRALHEADERLOCATOR, ZLIB_FILEFUNC_SEEK_SET) != 0) - return 0; - - /* Read locator signature */ - if (unzReadUInt32(pzlib_filefunc_def, filestream, &value32) != UNZ_OK) - return 0; - if (value32 != ZIP64ENDLOCHEADERMAGIC) - return 0; - /* Number of the disk with the start of the zip64 end of central directory */ - if (unzReadUInt32(pzlib_filefunc_def, filestream, &value32) != UNZ_OK) - return 0; - /* Relative offset of the zip64 end of central directory record */ - if (unzReadUInt64(pzlib_filefunc_def, filestream, &offset) != UNZ_OK) - return 0; - /* Total number of disks */ - if (unzReadUInt32(pzlib_filefunc_def, filestream, &value32) != UNZ_OK) - return 0; - /* Goto end of central directory record */ - if (ZSEEK64(*pzlib_filefunc_def, filestream, offset, ZLIB_FILEFUNC_SEEK_SET) != 0) - return 0; - /* The signature */ - if (unzReadUInt32(pzlib_filefunc_def, filestream, &value32) != UNZ_OK) - return 0; - if (value32 != ZIP64ENDHEADERMAGIC) - return 0; - - return offset; -} - -static unzFile unzOpenInternal(const void *path, zlib_filefunc64_32_def *pzlib_filefunc64_32_def) -{ - unz64_s us; - unz64_s *s = NULL; - uint64_t central_pos = 0; - uint64_t central_pos64 = 0; - uint64_t number_entry_CD = 0; - uint16_t value16 = 0; - uint32_t value32 = 0; - uint64_t value64 = 0; - voidpf filestream = NULL; - int err = UNZ_OK; - - if (unz_copyright[0] != ' ') - return NULL; - - us.filestream = NULL; - us.filestream_with_CD = NULL; - us.z_filefunc.zseek32_file = NULL; - us.z_filefunc.ztell32_file = NULL; - - if (pzlib_filefunc64_32_def == NULL) - 
fill_fopen64_filefunc(&us.z_filefunc.zfile_func64); - else - us.z_filefunc = *pzlib_filefunc64_32_def; - - us.filestream = ZOPEN64(us.z_filefunc, path, ZLIB_FILEFUNC_MODE_READ | ZLIB_FILEFUNC_MODE_EXISTING); - - if (us.filestream == NULL) - return NULL; - - us.filestream_with_CD = us.filestream; - us.is_zip64 = 0; - - /* Search for end of central directory header */ - central_pos = unzSearchCentralDir(&us.z_filefunc, us.filestream); - if (central_pos) - { - if (ZSEEK64(us.z_filefunc, us.filestream, central_pos, ZLIB_FILEFUNC_SEEK_SET) != 0) - err = UNZ_ERRNO; - - /* The signature, already checked */ - if (unzReadUInt32(&us.z_filefunc, us.filestream, &value32) != UNZ_OK) - err = UNZ_ERRNO; - /* Number of this disk */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - us.number_disk = value16; - /* Number of the disk with the start of the central directory */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - us.gi.number_disk_with_CD = value16; - /* Total number of entries in the central directory on this disk */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - us.gi.number_entry = value16; - /* Total number of entries in the central directory */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - number_entry_CD = value16; - if (number_entry_CD != us.gi.number_entry) - err = UNZ_BADZIPFILE; - /* Size of the central directory */ - if (unzReadUInt32(&us.z_filefunc, us.filestream, &value32) != UNZ_OK) - err = UNZ_ERRNO; - us.size_central_dir = value32; - /* Offset of start of central directory with respect to the starting disk number */ - if (unzReadUInt32(&us.z_filefunc, us.filestream, &value32) != UNZ_OK) - err = UNZ_ERRNO; - us.offset_central_dir = value32; - /* Zipfile comment length */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &us.gi.size_comment) != UNZ_OK) - err = UNZ_ERRNO; - - if (err == UNZ_OK) - { - /* Search for Zip64 end of central directory header */ - central_pos64 = unzSearchCentralDir64(&us.z_filefunc, us.filestream, central_pos); - if (central_pos64) - { - central_pos = central_pos64; - us.is_zip64 = 1; - - if (ZSEEK64(us.z_filefunc, us.filestream, central_pos, ZLIB_FILEFUNC_SEEK_SET) != 0) - err = UNZ_ERRNO; - - /* the signature, already checked */ - if (unzReadUInt32(&us.z_filefunc, us.filestream, &value32) != UNZ_OK) - err = UNZ_ERRNO; - /* size of zip64 end of central directory record */ - if (unzReadUInt64(&us.z_filefunc, us.filestream, &value64) != UNZ_OK) - err = UNZ_ERRNO; - /* version made by */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - /* version needed to extract */ - if (unzReadUInt16(&us.z_filefunc, us.filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - /* number of this disk */ - if (unzReadUInt32(&us.z_filefunc, us.filestream, &us.number_disk) != UNZ_OK) - err = UNZ_ERRNO; - /* number of the disk with the start of the central directory */ - if (unzReadUInt32(&us.z_filefunc, us.filestream, &us.gi.number_disk_with_CD) != UNZ_OK) - err = UNZ_ERRNO; - /* total number of entries in the central directory on this disk */ - if (unzReadUInt64(&us.z_filefunc, us.filestream, &us.gi.number_entry) != UNZ_OK) - err = UNZ_ERRNO; - /* total number of entries in the central directory */ - if (unzReadUInt64(&us.z_filefunc, us.filestream, &number_entry_CD) != UNZ_OK) - err = UNZ_ERRNO; - if (number_entry_CD != us.gi.number_entry) - err = 
UNZ_BADZIPFILE; - /* size of the central directory */ - if (unzReadUInt64(&us.z_filefunc, us.filestream, &us.size_central_dir) != UNZ_OK) - err = UNZ_ERRNO; - /* offset of start of central directory with respect to the starting disk number */ - if (unzReadUInt64(&us.z_filefunc, us.filestream, &us.offset_central_dir) != UNZ_OK) - err = UNZ_ERRNO; - } - else if ((us.gi.number_entry == UINT16_MAX) || (us.size_central_dir == UINT16_MAX) || (us.offset_central_dir == UINT32_MAX)) - err = UNZ_BADZIPFILE; - } - } - else - err = UNZ_ERRNO; - - if ((err == UNZ_OK) && (central_pos < us.offset_central_dir + us.size_central_dir)) - err = UNZ_BADZIPFILE; - - if (err != UNZ_OK) - { - ZCLOSE64(us.z_filefunc, us.filestream); - return NULL; - } - - if (us.gi.number_disk_with_CD == 0) - { - /* If there is only one disk open another stream so we don't have to seek between the CD - and the file headers constantly */ - filestream = ZOPEN64(us.z_filefunc, path, ZLIB_FILEFUNC_MODE_READ | ZLIB_FILEFUNC_MODE_EXISTING); - if (filestream != NULL) - us.filestream = filestream; - } - - /* Hack for zip files that have no respect for zip64 - if ((central_pos > 0xffffffff) && (us.offset_central_dir < 0xffffffff)) - us.offset_central_dir = central_pos - us.size_central_dir;*/ - - us.byte_before_the_zipfile = central_pos - (us.offset_central_dir + us.size_central_dir); - us.central_pos = central_pos; - us.pfile_in_zip_read = NULL; - - s = (unz64_s*)ALLOC(sizeof(unz64_s)); - if (s != NULL) - { - *s = us; - unzGoToFirstFile((unzFile)s); - } - return (unzFile)s; -} - -extern unzFile ZEXPORT unzOpen2(const char *path, zlib_filefunc_def *pzlib_filefunc32_def) -{ - if (pzlib_filefunc32_def != NULL) - { - zlib_filefunc64_32_def zlib_filefunc64_32_def_fill; - fill_zlib_filefunc64_32_def_from_filefunc32(&zlib_filefunc64_32_def_fill, pzlib_filefunc32_def); - return unzOpenInternal(path, &zlib_filefunc64_32_def_fill); - } - return unzOpenInternal(path, NULL); -} - -extern unzFile ZEXPORT unzOpen2_64(const void *path, zlib_filefunc64_def *pzlib_filefunc_def) -{ - if (pzlib_filefunc_def != NULL) - { - zlib_filefunc64_32_def zlib_filefunc64_32_def_fill; - zlib_filefunc64_32_def_fill.zfile_func64 = *pzlib_filefunc_def; - zlib_filefunc64_32_def_fill.ztell32_file = NULL; - zlib_filefunc64_32_def_fill.zseek32_file = NULL; - return unzOpenInternal(path, &zlib_filefunc64_32_def_fill); - } - return unzOpenInternal(path, NULL); -} - -extern unzFile ZEXPORT unzOpen(const char *path) -{ - return unzOpenInternal(path, NULL); -} - -extern unzFile ZEXPORT unzOpen64(const void *path) -{ - return unzOpenInternal(path, NULL); -} - -extern int ZEXPORT unzClose(unzFile file) -{ - unz64_s *s; - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - if (s->pfile_in_zip_read != NULL) - unzCloseCurrentFile(file); - - if ((s->filestream != NULL) && (s->filestream != s->filestream_with_CD)) - ZCLOSE64(s->z_filefunc, s->filestream); - if (s->filestream_with_CD != NULL) - ZCLOSE64(s->z_filefunc, s->filestream_with_CD); - - s->filestream = NULL; - s->filestream_with_CD = NULL; - TRYFREE(s); - return UNZ_OK; -} - -/* Goto to the next available disk for spanned archives */ -static int unzGoToNextDisk(unzFile file) -{ - unz64_s *s; - uint32_t number_disk_next = 0; - - s = (unz64_s*)file; - if (s == NULL) - return UNZ_PARAMERROR; - number_disk_next = s->number_disk; - - if ((s->pfile_in_zip_read != NULL) && (s->pfile_in_zip_read->rest_read_uncompressed > 0)) - /* We are currently reading a file and we need the next sequential disk */ - number_disk_next 
+= 1; - else - /* Goto the disk for the current file */ - number_disk_next = s->cur_file_info.disk_num_start; - - if (number_disk_next != s->number_disk) - { - /* Switch disks */ - if ((s->filestream != NULL) && (s->filestream != s->filestream_with_CD)) - ZCLOSE64(s->z_filefunc, s->filestream); - - if (number_disk_next == s->gi.number_disk_with_CD) - { - s->filestream = s->filestream_with_CD; - } - else - { - s->filestream = ZOPENDISK64(s->z_filefunc, s->filestream_with_CD, number_disk_next, - ZLIB_FILEFUNC_MODE_READ | ZLIB_FILEFUNC_MODE_EXISTING); - } - - if (s->filestream == NULL) - return UNZ_ERRNO; - - s->number_disk = number_disk_next; - } - - return UNZ_OK; -} - -extern int ZEXPORT unzGetGlobalInfo(unzFile file, unz_global_info* pglobal_info32) -{ - unz64_s *s = NULL; - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - pglobal_info32->number_entry = (uint32_t)s->gi.number_entry; - pglobal_info32->size_comment = s->gi.size_comment; - pglobal_info32->number_disk_with_CD = s->gi.number_disk_with_CD; - return UNZ_OK; -} - -extern int ZEXPORT unzGetGlobalInfo64(unzFile file, unz_global_info64 *pglobal_info) -{ - unz64_s *s = NULL; - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - *pglobal_info = s->gi; - return UNZ_OK; -} - -extern int ZEXPORT unzGetGlobalComment(unzFile file, char *comment, uint16_t comment_size) -{ - unz64_s *s = NULL; - uint16_t bytes_to_read = comment_size; - if (file == NULL) - return (int)UNZ_PARAMERROR; - s = (unz64_s*)file; - - if (bytes_to_read > s->gi.size_comment) - bytes_to_read = s->gi.size_comment; - - if (ZSEEK64(s->z_filefunc, s->filestream_with_CD, s->central_pos + 22, ZLIB_FILEFUNC_SEEK_SET) != 0) - return UNZ_ERRNO; - - if (bytes_to_read > 0) - { - *comment = 0; - if (ZREAD64(s->z_filefunc, s->filestream_with_CD, comment, bytes_to_read) != bytes_to_read) - return UNZ_ERRNO; - } - - if ((comment != NULL) && (comment_size > s->gi.size_comment)) - *(comment + s->gi.size_comment) = 0; - - return (int)bytes_to_read; -} - -static int unzGetCurrentFileInfoField(unzFile file, uint32_t *seek, void *field, uint16_t field_size, uint16_t size_file_field, int null_terminated_field) -{ - unz64_s *s = NULL; - uint32_t bytes_to_read = 0; - int err = UNZ_OK; - - if (file == NULL) - return (int)UNZ_PARAMERROR; - s = (unz64_s*)file; - - /* Read field */ - if (field != NULL) - { - if (size_file_field < field_size) - { - if (null_terminated_field) - *((char *)field+size_file_field) = 0; - - bytes_to_read = size_file_field; - } - else - bytes_to_read = field_size; - - if (*seek != 0) - { - if (ZSEEK64(s->z_filefunc, s->filestream_with_CD, *seek, ZLIB_FILEFUNC_SEEK_CUR) == 0) - *seek = 0; - else - err = UNZ_ERRNO; - } - - if ((size_file_field > 0) && (field_size > 0)) - { - if (ZREAD64(s->z_filefunc, s->filestream_with_CD, field, bytes_to_read) != bytes_to_read) - err = UNZ_ERRNO; - } - *seek += size_file_field - bytes_to_read; - } - else - { - *seek += size_file_field; - } - - return err; -} - -/* Get info about the current file in the zipfile, with internal only info */ -static int unzGetCurrentFileInfoInternal(unzFile file, unz_file_info64 *pfile_info, - unz_file_info64_internal *pfile_info_internal, char *filename, uint16_t filename_size, void *extrafield, - uint16_t extrafield_size, char *comment, uint16_t comment_size) -{ - unz64_s *s = NULL; - unz_file_info64 file_info; - unz_file_info64_internal file_info_internal; - uint32_t magic = 0; - uint64_t current_pos = 0; - uint32_t seek = 0; - uint32_t extra_pos = 0; - uint16_t 
extra_header_id = 0; - uint16_t extra_data_size = 0; - uint16_t value16 = 0; - uint32_t value32 = 0; - uint64_t value64 = 0; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - if (ZSEEK64(s->z_filefunc, s->filestream_with_CD, - s->pos_in_central_dir + s->byte_before_the_zipfile, ZLIB_FILEFUNC_SEEK_SET) != 0) - err = UNZ_ERRNO; - - /* Check the magic */ - if (err == UNZ_OK) - { - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &magic) != UNZ_OK) - err = UNZ_ERRNO; - else if (magic != CENTRALHEADERMAGIC) - err = UNZ_BADZIPFILE; - } - - /* Read central directory header */ - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.version) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.version_needed) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.flag) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.compression_method) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &file_info.dos_date) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &file_info.crc) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &value32) != UNZ_OK) - err = UNZ_ERRNO; - file_info.compressed_size = value32; - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &value32) != UNZ_OK) - err = UNZ_ERRNO; - file_info.uncompressed_size = value32; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.size_filename) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.size_file_extra) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.size_file_comment) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &value16) != UNZ_OK) - err = UNZ_ERRNO; - file_info.disk_num_start = value16; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &file_info.internal_fa) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &file_info.external_fa) != UNZ_OK) - err = UNZ_ERRNO; - /* Relative offset of local header */ - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &value32) != UNZ_OK) - err = UNZ_ERRNO; - - file_info.size_file_extra_internal = 0; - file_info.disk_offset = value32; - file_info_internal.offset_curfile = value32; -#ifdef HAVE_AES - file_info_internal.aes_compression_method = 0; - file_info_internal.aes_encryption_mode = 0; - file_info_internal.aes_version = 0; -#endif - - if (err == UNZ_OK) - err = unzGetCurrentFileInfoField(file, &seek, filename, filename_size, file_info.size_filename, 1); - - /* Read extrafield */ - if (err == UNZ_OK) - err = unzGetCurrentFileInfoField(file, &seek, extrafield, extrafield_size, file_info.size_file_extra, 0); - - if ((err == UNZ_OK) && (file_info.size_file_extra != 0)) - { - if (seek != 0) - { - if (ZSEEK64(s->z_filefunc, s->filestream_with_CD, seek, ZLIB_FILEFUNC_SEEK_CUR) == 0) - seek = 0; - else - err = UNZ_ERRNO; - } - - /* We are going to parse the extra field so we need to move back */ - current_pos = ZTELL64(s->z_filefunc, s->filestream_with_CD); - if (current_pos < file_info.size_file_extra) - err = UNZ_ERRNO; - current_pos -= file_info.size_file_extra; - if (ZSEEK64(s->z_filefunc, s->filestream_with_CD, current_pos, ZLIB_FILEFUNC_SEEK_SET) != 0) - err = 
UNZ_ERRNO; - - while ((err != UNZ_ERRNO) && (extra_pos < file_info.size_file_extra)) - { - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &extra_header_id) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &extra_data_size) != UNZ_OK) - err = UNZ_ERRNO; - - /* ZIP64 extra fields */ - if (extra_header_id == 0x0001) - { - /* Subtract size of ZIP64 field, since ZIP64 is handled internally */ - file_info.size_file_extra_internal += 2 + 2 + extra_data_size; - - if (file_info.uncompressed_size == UINT32_MAX) - { - if (unzReadUInt64(&s->z_filefunc, s->filestream_with_CD, &file_info.uncompressed_size) != UNZ_OK) - err = UNZ_ERRNO; - } - if (file_info.compressed_size == UINT32_MAX) - { - if (unzReadUInt64(&s->z_filefunc, s->filestream_with_CD, &file_info.compressed_size) != UNZ_OK) - err = UNZ_ERRNO; - } - if (file_info_internal.offset_curfile == UINT32_MAX) - { - /* Relative Header offset */ - if (unzReadUInt64(&s->z_filefunc, s->filestream_with_CD, &value64) != UNZ_OK) - err = UNZ_ERRNO; - file_info_internal.offset_curfile = value64; - file_info.disk_offset = value64; - } - if (file_info.disk_num_start == UINT32_MAX) - { - /* Disk Start Number */ - if (unzReadUInt32(&s->z_filefunc, s->filestream_with_CD, &file_info.disk_num_start) != UNZ_OK) - err = UNZ_ERRNO; - } - } -#ifdef HAVE_AES - /* AES header */ - else if (extra_header_id == 0x9901) - { - uint8_t value8 = 0; - - /* Subtract size of AES field, since AES is handled internally */ - file_info.size_file_extra_internal += 2 + 2 + extra_data_size; - - /* Verify version info */ - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &value16) != UNZ_OK) - err = UNZ_ERRNO; - /* Support AE-1 and AE-2 */ - if (value16 != 1 && value16 != 2) - err = UNZ_ERRNO; - file_info_internal.aes_version = value16; - if (unzReadUInt8(&s->z_filefunc, s->filestream_with_CD, &value8) != UNZ_OK) - err = UNZ_ERRNO; - if ((char)value8 != 'A') - err = UNZ_ERRNO; - if (unzReadUInt8(&s->z_filefunc, s->filestream_with_CD, &value8) != UNZ_OK) - err = UNZ_ERRNO; - if ((char)value8 != 'E') - err = UNZ_ERRNO; - /* Get AES encryption strength and actual compression method */ - if (unzReadUInt8(&s->z_filefunc, s->filestream_with_CD, &value8) != UNZ_OK) - err = UNZ_ERRNO; - file_info_internal.aes_encryption_mode = value8; - if (unzReadUInt16(&s->z_filefunc, s->filestream_with_CD, &value16) != UNZ_OK) - err = UNZ_ERRNO; - file_info_internal.aes_compression_method = value16; - } -#endif - else - { - if (ZSEEK64(s->z_filefunc, s->filestream_with_CD,extra_data_size, ZLIB_FILEFUNC_SEEK_CUR) != 0) - err = UNZ_ERRNO; - } - - extra_pos += 2 + 2 + extra_data_size; - } - } - - if (file_info.disk_num_start == s->gi.number_disk_with_CD) - file_info_internal.byte_before_the_zipfile = s->byte_before_the_zipfile; - else - file_info_internal.byte_before_the_zipfile = 0; - - if (err == UNZ_OK) - err = unzGetCurrentFileInfoField(file, &seek, comment, comment_size, file_info.size_file_comment, 1); - - if ((err == UNZ_OK) && (pfile_info != NULL)) - *pfile_info = file_info; - if ((err == UNZ_OK) && (pfile_info_internal != NULL)) - *pfile_info_internal = file_info_internal; - - return err; -} - -extern int ZEXPORT unzGetCurrentFileInfo(unzFile file, unz_file_info *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size) -{ - unz_file_info64 file_info64; - int err = UNZ_OK; - - err = unzGetCurrentFileInfoInternal(file, &file_info64, NULL, filename, filename_size, - 
extrafield, extrafield_size, comment, comment_size); - - if ((err == UNZ_OK) && (pfile_info != NULL)) - { - pfile_info->version = file_info64.version; - pfile_info->version_needed = file_info64.version_needed; - pfile_info->flag = file_info64.flag; - pfile_info->compression_method = file_info64.compression_method; - pfile_info->dos_date = file_info64.dos_date; - pfile_info->crc = file_info64.crc; - - pfile_info->size_filename = file_info64.size_filename; - pfile_info->size_file_extra = file_info64.size_file_extra - file_info64.size_file_extra_internal; - pfile_info->size_file_comment = file_info64.size_file_comment; - - pfile_info->disk_num_start = (uint16_t)file_info64.disk_num_start; - pfile_info->internal_fa = file_info64.internal_fa; - pfile_info->external_fa = file_info64.external_fa; - - pfile_info->compressed_size = (uint32_t)file_info64.compressed_size; - pfile_info->uncompressed_size = (uint32_t)file_info64.uncompressed_size; - } - return err; -} - -extern int ZEXPORT unzGetCurrentFileInfo64(unzFile file, unz_file_info64 * pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size) -{ - return unzGetCurrentFileInfoInternal(file, pfile_info, NULL, filename, filename_size, - extrafield, extrafield_size, comment,comment_size); -} - -/* Read the local header of the current zipfile. Check the coherency of the local header and info in the - end of central directory about this file store in *piSizeVar the size of extra info in local header - (filename and size of extra field data) */ -static int unzCheckCurrentFileCoherencyHeader(unz64_s *s, uint32_t *psize_variable, uint64_t *poffset_local_extrafield, - uint16_t *psize_local_extrafield) -{ - uint32_t magic = 0; - uint16_t value16 = 0; - uint32_t value32 = 0; - uint32_t flags = 0; - uint16_t size_filename = 0; - uint16_t size_extra_field = 0; - uint16_t compression_method = 0; - int err = UNZ_OK; - - if (psize_variable == NULL) - return UNZ_PARAMERROR; - *psize_variable = 0; - if (poffset_local_extrafield == NULL) - return UNZ_PARAMERROR; - *poffset_local_extrafield = 0; - if (psize_local_extrafield == NULL) - return UNZ_PARAMERROR; - *psize_local_extrafield = 0; - - err = unzGoToNextDisk((unzFile)s); - if (err != UNZ_OK) - return err; - - if (ZSEEK64(s->z_filefunc, s->filestream, s->cur_file_info_internal.offset_curfile + - s->cur_file_info_internal.byte_before_the_zipfile, ZLIB_FILEFUNC_SEEK_SET) != 0) - return UNZ_ERRNO; - - if (err == UNZ_OK) - { - if (unzReadUInt32(&s->z_filefunc, s->filestream, &magic) != UNZ_OK) - err = UNZ_ERRNO; - else if (magic != LOCALHEADERMAGIC) - err = UNZ_BADZIPFILE; - } - - if (unzReadUInt16(&s->z_filefunc, s->filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - if (unzReadUInt16(&s->z_filefunc, s->filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - flags = value16; - if (unzReadUInt16(&s->z_filefunc, s->filestream, &value16) != UNZ_OK) - err = UNZ_ERRNO; - else if ((err == UNZ_OK) && (value16 != s->cur_file_info.compression_method)) - err = UNZ_BADZIPFILE; - - compression_method = s->cur_file_info.compression_method; -#ifdef HAVE_AES - if (compression_method == AES_METHOD) - compression_method = s->cur_file_info_internal.aes_compression_method; -#endif - - if ((err == UNZ_OK) && (compression_method != 0) && (compression_method != Z_DEFLATED)) - { -#ifdef HAVE_BZIP2 - if (compression_method != Z_BZIP2ED) -#endif - err = UNZ_BADZIPFILE; - } - - if (unzReadUInt32(&s->z_filefunc, s->filestream, &value32) != UNZ_OK) /* date/time */ 
- err = UNZ_ERRNO; - if (unzReadUInt32(&s->z_filefunc, s->filestream, &value32) != UNZ_OK) /* crc */ - err = UNZ_ERRNO; - else if ((err == UNZ_OK) && (value32 != s->cur_file_info.crc) && ((flags & 8) == 0)) - err = UNZ_BADZIPFILE; - if (unzReadUInt32(&s->z_filefunc, s->filestream, &value32) != UNZ_OK) /* size compr */ - err = UNZ_ERRNO; - else if ((value32 != UINT32_MAX) && (err == UNZ_OK) && (value32 != s->cur_file_info.compressed_size) && ((flags & 8) == 0)) - err = UNZ_BADZIPFILE; - if (unzReadUInt32(&s->z_filefunc, s->filestream, &value32) != UNZ_OK) /* size uncompr */ - err = UNZ_ERRNO; - else if ((value32 != UINT32_MAX) && (err == UNZ_OK) && (value32 != s->cur_file_info.uncompressed_size) && ((flags & 8) == 0)) - err = UNZ_BADZIPFILE; - if (unzReadUInt16(&s->z_filefunc, s->filestream, &size_filename) != UNZ_OK) - err = UNZ_ERRNO; - else if ((err == UNZ_OK) && (size_filename != s->cur_file_info.size_filename)) - err = UNZ_BADZIPFILE; - - *psize_variable += size_filename; - - if (unzReadUInt16(&s->z_filefunc, s->filestream, &size_extra_field) != UNZ_OK) - err = UNZ_ERRNO; - - *poffset_local_extrafield = s->cur_file_info_internal.offset_curfile + SIZEZIPLOCALHEADER + size_filename; - *psize_local_extrafield = size_extra_field; - *psize_variable += size_extra_field; - - return err; -} - -/* - Open for reading data the current file in the zipfile. - If there is no error and the file is opened, the return value is UNZ_OK. -*/ -extern int ZEXPORT unzOpenCurrentFile3(unzFile file, int *method, int *level, int raw, const char *password) -{ - unz64_s *s = NULL; - file_in_zip64_read_info_s *pfile_in_zip_read_info = NULL; - uint16_t compression_method = 0; - uint64_t offset_local_extrafield = 0; - uint16_t size_local_extrafield = 0; - uint32_t size_variable = 0; - int err = UNZ_OK; -#ifndef NOUNCRYPT - char source[12]; -#else - if (password != NULL) - return UNZ_PARAMERROR; -#endif - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (!s->current_file_ok) - return UNZ_PARAMERROR; - - if (s->pfile_in_zip_read != NULL) - unzCloseCurrentFile(file); - - if (unzCheckCurrentFileCoherencyHeader(s, &size_variable, &offset_local_extrafield, &size_local_extrafield) != UNZ_OK) - return UNZ_BADZIPFILE; - - pfile_in_zip_read_info = (file_in_zip64_read_info_s*)ALLOC(sizeof(file_in_zip64_read_info_s)); - if (pfile_in_zip_read_info == NULL) - return UNZ_INTERNALERROR; - - pfile_in_zip_read_info->read_buffer = (uint8_t*)ALLOC(UNZ_BUFSIZE); - pfile_in_zip_read_info->offset_local_extrafield = offset_local_extrafield; - pfile_in_zip_read_info->size_local_extrafield = size_local_extrafield; - pfile_in_zip_read_info->pos_local_extrafield = 0; - pfile_in_zip_read_info->raw = raw; - - if (pfile_in_zip_read_info->read_buffer == NULL) - { - TRYFREE(pfile_in_zip_read_info); - return UNZ_INTERNALERROR; - } - - pfile_in_zip_read_info->stream_initialised = 0; - - compression_method = s->cur_file_info.compression_method; -#ifdef HAVE_AES - if (compression_method == AES_METHOD) - { - compression_method = s->cur_file_info_internal.aes_compression_method; - if (password == NULL) - { - TRYFREE(pfile_in_zip_read_info); - return UNZ_PARAMERROR; - } - } -#endif - - if (method != NULL) - *method = compression_method; - - if (level != NULL) - { - *level = 6; - switch (s->cur_file_info.flag & 0x06) - { - case 6 : *level = 1; break; - case 4 : *level = 2; break; - case 2 : *level = 9; break; - } - } - - if ((compression_method != 0) && (compression_method != Z_DEFLATED)) - { -#ifdef HAVE_BZIP2 - if (compression_method 
!= Z_BZIP2ED) -#endif - { - TRYFREE(pfile_in_zip_read_info); - return UNZ_BADZIPFILE; - } - } - - pfile_in_zip_read_info->crc32_wait = s->cur_file_info.crc; - pfile_in_zip_read_info->crc32 = 0; - pfile_in_zip_read_info->total_out_64 = 0; - pfile_in_zip_read_info->compression_method = compression_method; - pfile_in_zip_read_info->filestream = s->filestream; - pfile_in_zip_read_info->z_filefunc = s->z_filefunc; - if (s->number_disk == s->gi.number_disk_with_CD) - pfile_in_zip_read_info->byte_before_the_zipfile = s->byte_before_the_zipfile; - else - pfile_in_zip_read_info->byte_before_the_zipfile = 0; - pfile_in_zip_read_info->stream.total_out = 0; - pfile_in_zip_read_info->stream.total_in = 0; - pfile_in_zip_read_info->stream.next_in = NULL; - - if (!raw) - { - if (compression_method == Z_BZIP2ED) - { -#ifdef HAVE_BZIP2 - pfile_in_zip_read_info->bstream.bzalloc = (void *(*) (void *, int, int))0; - pfile_in_zip_read_info->bstream.bzfree = (free_func)0; - pfile_in_zip_read_info->bstream.opaque = (voidpf)0; - pfile_in_zip_read_info->bstream.state = (voidpf)0; - - pfile_in_zip_read_info->stream.zalloc = (alloc_func)0; - pfile_in_zip_read_info->stream.zfree = (free_func)0; - pfile_in_zip_read_info->stream.opaque = (voidpf)0; - pfile_in_zip_read_info->stream.next_in = (voidpf)0; - pfile_in_zip_read_info->stream.avail_in = 0; - - err = BZ2_bzDecompressInit(&pfile_in_zip_read_info->bstream, 0, 0); - if (err == Z_OK) - { - pfile_in_zip_read_info->stream_initialised = Z_BZIP2ED; - } - else - { - TRYFREE(pfile_in_zip_read_info); - return err; - } -#else - pfile_in_zip_read_info->raw = 1; -#endif - } - else if (compression_method == Z_DEFLATED) - { - pfile_in_zip_read_info->stream.zalloc = (alloc_func)0; - pfile_in_zip_read_info->stream.zfree = (free_func)0; - pfile_in_zip_read_info->stream.opaque = (voidpf)s; - pfile_in_zip_read_info->stream.next_in = 0; - pfile_in_zip_read_info->stream.avail_in = 0; - -#ifdef HAVE_APPLE_COMPRESSION - err = compression_stream_init(&pfile_in_zip_read_info->astream, COMPRESSION_STREAM_DECODE, COMPRESSION_ZLIB); - if (err == COMPRESSION_STATUS_ERROR) - err = UNZ_INTERNALERROR; - else - err = Z_OK; -#else - err = inflateInit2(&pfile_in_zip_read_info->stream, -MAX_WBITS); -#endif - if (err == Z_OK) - { - pfile_in_zip_read_info->stream_initialised = Z_DEFLATED; - } - else - { - TRYFREE(pfile_in_zip_read_info); - return err; - } - /* windowBits is passed < 0 to tell that there is no zlib header. - * Note that in this case inflate *requires* an extra "dummy" byte - * after the compressed stream in order to complete decompression and - * return Z_STREAM_END. 
- * In unzip, i don't wait absolutely Z_STREAM_END because I known the - * size of both compressed and uncompressed data - */ - } - } - - pfile_in_zip_read_info->rest_read_compressed = s->cur_file_info.compressed_size; - pfile_in_zip_read_info->rest_read_uncompressed = s->cur_file_info.uncompressed_size; - pfile_in_zip_read_info->pos_in_zipfile = s->cur_file_info_internal.offset_curfile + SIZEZIPLOCALHEADER + size_variable; - pfile_in_zip_read_info->stream.avail_in = 0; - - s->pfile_in_zip_read = pfile_in_zip_read_info; - -#ifndef NOUNCRYPT - s->pcrc_32_tab = NULL; - - if ((password != NULL) && ((s->cur_file_info.flag & 1) != 0)) - { - if (ZSEEK64(s->z_filefunc, s->filestream, - s->pfile_in_zip_read->pos_in_zipfile + s->pfile_in_zip_read->byte_before_the_zipfile, - ZLIB_FILEFUNC_SEEK_SET) != 0) - return UNZ_INTERNALERROR; -#ifdef HAVE_AES - if (s->cur_file_info.compression_method == AES_METHOD) - { - unsigned char passverify_archive[AES_PWVERIFYSIZE]; - unsigned char passverify_password[AES_PWVERIFYSIZE]; - unsigned char salt_value[AES_MAXSALTLENGTH]; - uint32_t salt_length = 0; - - if ((s->cur_file_info_internal.aes_encryption_mode < 1) || - (s->cur_file_info_internal.aes_encryption_mode > 3)) - return UNZ_INTERNALERROR; - - salt_length = SALT_LENGTH(s->cur_file_info_internal.aes_encryption_mode); - - if (ZREAD64(s->z_filefunc, s->filestream, salt_value, salt_length) != salt_length) - return UNZ_INTERNALERROR; - if (ZREAD64(s->z_filefunc, s->filestream, passverify_archive, AES_PWVERIFYSIZE) != AES_PWVERIFYSIZE) - return UNZ_INTERNALERROR; - - fcrypt_init(s->cur_file_info_internal.aes_encryption_mode, (uint8_t *)password, - (uint32_t)strlen(password), salt_value, passverify_password, &s->pfile_in_zip_read->aes_ctx); - - if (memcmp(passverify_archive, passverify_password, AES_PWVERIFYSIZE) != 0) - return UNZ_BADPASSWORD; - - s->pfile_in_zip_read->rest_read_compressed -= salt_length + AES_PWVERIFYSIZE; - s->pfile_in_zip_read->rest_read_compressed -= AES_AUTHCODESIZE; - - s->pfile_in_zip_read->pos_in_zipfile += salt_length + AES_PWVERIFYSIZE; - } - else -#endif - { - int i; - s->pcrc_32_tab = (const z_crc_t*)get_crc_table(); - init_keys(password, s->keys, s->pcrc_32_tab); - - if (ZREAD64(s->z_filefunc, s->filestream, source, 12) < 12) - return UNZ_INTERNALERROR; - - for (i = 0; i < 12; i++) - zdecode(s->keys, s->pcrc_32_tab, source[i]); - - s->pfile_in_zip_read->rest_read_compressed -= 12; - s->pfile_in_zip_read->pos_in_zipfile += 12; - } - } -#endif - - return UNZ_OK; -} - -extern int ZEXPORT unzOpenCurrentFile(unzFile file) -{ - return unzOpenCurrentFile3(file, NULL, NULL, 0, NULL); -} - -extern int ZEXPORT unzOpenCurrentFilePassword(unzFile file, const char *password) -{ - return unzOpenCurrentFile3(file, NULL, NULL, 0, password); -} - -extern int ZEXPORT unzOpenCurrentFile2(unzFile file, int *method, int *level, int raw) -{ - return unzOpenCurrentFile3(file, method, level, raw, NULL); -} - -/* Read bytes from the current file. - buf contain buffer where data must be copied - len the size of buf. 
- - return the number of byte copied if some bytes are copied - return 0 if the end of file was reached - return <0 with error code if there is an error (UNZ_ERRNO for IO error, or zLib error for uncompress error) */ -extern int ZEXPORT unzReadCurrentFile(unzFile file, voidp buf, uint32_t len) -{ - unz64_s *s = NULL; - uint32_t read = 0; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - if (s->pfile_in_zip_read == NULL) - return UNZ_PARAMERROR; - if (s->pfile_in_zip_read->read_buffer == NULL) - return UNZ_END_OF_LIST_OF_FILE; - if (len == 0) - return 0; - if (len > UINT16_MAX) - return UNZ_PARAMERROR; - - s->pfile_in_zip_read->stream.next_out = (uint8_t*)buf; - s->pfile_in_zip_read->stream.avail_out = (uint16_t)len; - - if (s->pfile_in_zip_read->raw) - { - if (len > s->pfile_in_zip_read->rest_read_compressed + s->pfile_in_zip_read->stream.avail_in) - s->pfile_in_zip_read->stream.avail_out = (uint16_t)s->pfile_in_zip_read->rest_read_compressed + - s->pfile_in_zip_read->stream.avail_in; - } - else - { - if (len > s->pfile_in_zip_read->rest_read_uncompressed) - s->pfile_in_zip_read->stream.avail_out = (uint16_t)s->pfile_in_zip_read->rest_read_uncompressed; - } - - while (s->pfile_in_zip_read->stream.avail_out > 0) - { - if (s->pfile_in_zip_read->stream.avail_in == 0) - { - uint32_t bytes_to_read = UNZ_BUFSIZE; - uint32_t bytes_not_read = 0; - uint32_t bytes_read = 0; - uint32_t total_bytes_read = 0; - - if (s->pfile_in_zip_read->stream.next_in != NULL) - bytes_not_read = (uint32_t)(s->pfile_in_zip_read->read_buffer + UNZ_BUFSIZE - - s->pfile_in_zip_read->stream.next_in); - bytes_to_read -= bytes_not_read; - if (bytes_not_read > 0) - memcpy(s->pfile_in_zip_read->read_buffer, s->pfile_in_zip_read->stream.next_in, bytes_not_read); - if (s->pfile_in_zip_read->rest_read_compressed < bytes_to_read) - bytes_to_read = (uint16_t)s->pfile_in_zip_read->rest_read_compressed; - - while (total_bytes_read != bytes_to_read) - { - if (ZSEEK64(s->pfile_in_zip_read->z_filefunc, s->pfile_in_zip_read->filestream, - s->pfile_in_zip_read->pos_in_zipfile + s->pfile_in_zip_read->byte_before_the_zipfile, - ZLIB_FILEFUNC_SEEK_SET) != 0) - return UNZ_ERRNO; - - bytes_read = ZREAD64(s->pfile_in_zip_read->z_filefunc, s->pfile_in_zip_read->filestream, - s->pfile_in_zip_read->read_buffer + bytes_not_read + total_bytes_read, - bytes_to_read - total_bytes_read); - - total_bytes_read += bytes_read; - s->pfile_in_zip_read->pos_in_zipfile += bytes_read; - - if (bytes_read == 0) - { - if (ZERROR64(s->pfile_in_zip_read->z_filefunc, s->pfile_in_zip_read->filestream)) - return UNZ_ERRNO; - - err = unzGoToNextDisk(file); - if (err != UNZ_OK) - return err; - - s->pfile_in_zip_read->pos_in_zipfile = 0; - s->pfile_in_zip_read->filestream = s->filestream; - } - } - -#ifndef NOUNCRYPT - if ((s->cur_file_info.flag & 1) != 0) - { -#ifdef HAVE_AES - if (s->cur_file_info.compression_method == AES_METHOD) - { - fcrypt_decrypt(s->pfile_in_zip_read->read_buffer, bytes_to_read, &s->pfile_in_zip_read->aes_ctx); - } - else -#endif - if (s->pcrc_32_tab != NULL) - { - uint32_t i = 0; - - for (i = 0; i < total_bytes_read; i++) - s->pfile_in_zip_read->read_buffer[i] = - zdecode(s->keys, s->pcrc_32_tab, s->pfile_in_zip_read->read_buffer[i]); - } - } -#endif - - s->pfile_in_zip_read->rest_read_compressed -= total_bytes_read; - s->pfile_in_zip_read->stream.next_in = (uint8_t*)s->pfile_in_zip_read->read_buffer; - s->pfile_in_zip_read->stream.avail_in = (uint16_t)(bytes_not_read + total_bytes_read); - } - - if 
((s->pfile_in_zip_read->compression_method == 0) || (s->pfile_in_zip_read->raw)) - { - uint32_t i = 0; - uint32_t copy = 0; - - if ((s->pfile_in_zip_read->stream.avail_in == 0) && - (s->pfile_in_zip_read->rest_read_compressed == 0)) - return (read == 0) ? UNZ_EOF : read; - - if (s->pfile_in_zip_read->stream.avail_out < s->pfile_in_zip_read->stream.avail_in) - copy = s->pfile_in_zip_read->stream.avail_out; - else - copy = s->pfile_in_zip_read->stream.avail_in; - - for (i = 0; i < copy; i++) - *(s->pfile_in_zip_read->stream.next_out + i) = - *(s->pfile_in_zip_read->stream.next_in + i); - - s->pfile_in_zip_read->total_out_64 = s->pfile_in_zip_read->total_out_64 + copy; - s->pfile_in_zip_read->rest_read_uncompressed -= copy; - s->pfile_in_zip_read->crc32 = (uint32_t)crc32(s->pfile_in_zip_read->crc32, - s->pfile_in_zip_read->stream.next_out, copy); - - s->pfile_in_zip_read->stream.avail_in -= copy; - s->pfile_in_zip_read->stream.avail_out -= copy; - s->pfile_in_zip_read->stream.next_out += copy; - s->pfile_in_zip_read->stream.next_in += copy; - s->pfile_in_zip_read->stream.total_out += copy; - - read += copy; - } - else if (s->pfile_in_zip_read->compression_method == Z_BZIP2ED) - { -#ifdef HAVE_BZIP2 - uint64_t total_out_before = 0; - uint64_t total_out_after = 0; - uint64_t out_bytes = 0; - const uint8_t *buf_before = NULL; - - s->pfile_in_zip_read->bstream.next_in = (char*)s->pfile_in_zip_read->stream.next_in; - s->pfile_in_zip_read->bstream.avail_in = s->pfile_in_zip_read->stream.avail_in; - s->pfile_in_zip_read->bstream.total_in_lo32 = (uint32_t)s->pfile_in_zip_read->stream.total_in; - s->pfile_in_zip_read->bstream.total_in_hi32 = s->pfile_in_zip_read->stream.total_in >> 32; - - s->pfile_in_zip_read->bstream.next_out = (char*)s->pfile_in_zip_read->stream.next_out; - s->pfile_in_zip_read->bstream.avail_out = s->pfile_in_zip_read->stream.avail_out; - s->pfile_in_zip_read->bstream.total_out_lo32 = (uint32_t)s->pfile_in_zip_read->stream.total_out; - s->pfile_in_zip_read->bstream.total_out_hi32 = s->pfile_in_zip_read->stream.total_out >> 32; - - total_out_before = s->pfile_in_zip_read->bstream.total_out_lo32 + - (((uint32_t)s->pfile_in_zip_read->bstream.total_out_hi32) << 32); - buf_before = (const uint8_t*)s->pfile_in_zip_read->bstream.next_out; - - err = BZ2_bzDecompress(&s->pfile_in_zip_read->bstream); - - total_out_after = s->pfile_in_zip_read->bstream.total_out_lo32 + - (((uint32_t)s->pfile_in_zip_read->bstream.total_out_hi32) << 32); - - out_bytes = total_out_after - total_out_before; - - s->pfile_in_zip_read->total_out_64 = s->pfile_in_zip_read->total_out_64 + out_bytes; - s->pfile_in_zip_read->rest_read_uncompressed -= out_bytes; - s->pfile_in_zip_read->crc32 = crc32(s->pfile_in_zip_read->crc32, buf_before, (uint32_t)out_bytes); - - read += (uint32_t)out_bytes; - - s->pfile_in_zip_read->stream.next_in = (uint8_t*)s->pfile_in_zip_read->bstream.next_in; - s->pfile_in_zip_read->stream.avail_in = s->pfile_in_zip_read->bstream.avail_in; - s->pfile_in_zip_read->stream.total_in = s->pfile_in_zip_read->bstream.total_in_lo32; - s->pfile_in_zip_read->stream.next_out = (uint8_t*)s->pfile_in_zip_read->bstream.next_out; - s->pfile_in_zip_read->stream.avail_out = s->pfile_in_zip_read->bstream.avail_out; - s->pfile_in_zip_read->stream.total_out = s->pfile_in_zip_read->bstream.total_out_lo32; - - if (err == BZ_STREAM_END) - return (read == 0) ? 
UNZ_EOF : read; - if (err != BZ_OK) - break; -#endif - } -#ifdef HAVE_APPLE_COMPRESSION - else - { - uint64_t total_out_before = 0; - uint64_t total_out_after = 0; - uint64_t out_bytes = 0; - const uint8_t *buf_before = NULL; - - s->pfile_in_zip_read->astream.src_ptr = s->pfile_in_zip_read->stream.next_in; - s->pfile_in_zip_read->astream.src_size = s->pfile_in_zip_read->stream.avail_in; - s->pfile_in_zip_read->astream.dst_ptr = s->pfile_in_zip_read->stream.next_out; - s->pfile_in_zip_read->astream.dst_size = len; - - total_out_before = s->pfile_in_zip_read->stream.total_out; - buf_before = s->pfile_in_zip_read->stream.next_out; - - compression_status status; - compression_stream_flags flags; - - if (s->pfile_in_zip_read->stream.avail_in == 0) - { - flags = COMPRESSION_STREAM_FINALIZE; - } - - status = compression_stream_process(&s->pfile_in_zip_read->astream, flags); - - total_out_after = len - s->pfile_in_zip_read->astream.dst_size; - out_bytes = total_out_after - total_out_before; - - s->pfile_in_zip_read->total_out_64 += out_bytes; - s->pfile_in_zip_read->rest_read_uncompressed -= out_bytes; - s->pfile_in_zip_read->crc32 = - crc32(s->pfile_in_zip_read->crc32, buf_before, (uint32_t)out_bytes); - - read += (uint32_t)out_bytes; - - s->pfile_in_zip_read->stream.next_in = s->pfile_in_zip_read->astream.src_ptr; - s->pfile_in_zip_read->stream.avail_in = s->pfile_in_zip_read->astream.src_size; - s->pfile_in_zip_read->stream.next_out = s->pfile_in_zip_read->astream.dst_ptr; - s->pfile_in_zip_read->stream.avail_out = s->pfile_in_zip_read->astream.dst_size; - - if (status == COMPRESSION_STATUS_END) - return (read == 0) ? UNZ_EOF : read; - if (status == COMPRESSION_STATUS_ERROR) - return Z_DATA_ERROR; - return read; - } -#else - else - { - - uint64_t total_out_before = 0; - uint64_t total_out_after = 0; - uint64_t out_bytes = 0; - const uint8_t *buf_before = NULL; - int flush = Z_SYNC_FLUSH; - - total_out_before = s->pfile_in_zip_read->stream.total_out; - buf_before = s->pfile_in_zip_read->stream.next_out; - - /* - if ((pfile_in_zip_read_info->rest_read_uncompressed == - pfile_in_zip_read_info->stream.avail_out) && - (pfile_in_zip_read_info->rest_read_compressed == 0)) - flush = Z_FINISH; - */ - err = inflate(&s->pfile_in_zip_read->stream, flush); - - if ((err >= 0) && (s->pfile_in_zip_read->stream.msg != NULL)) - err = Z_DATA_ERROR; - - total_out_after = s->pfile_in_zip_read->stream.total_out; - out_bytes = total_out_after - total_out_before; - - s->pfile_in_zip_read->total_out_64 += out_bytes; - s->pfile_in_zip_read->rest_read_uncompressed -= out_bytes; - s->pfile_in_zip_read->crc32 = - (uint32_t)crc32(s->pfile_in_zip_read->crc32,buf_before, (uint32_t)out_bytes); - - read += (uint32_t)out_bytes; - - if (err == Z_STREAM_END) - return (read == 0) ? 
UNZ_EOF : read; - if (err != Z_OK) - break; - } -#endif - } - - if (err == Z_OK) - return read; - return err; -} - -extern int ZEXPORT unzGetLocalExtrafield(unzFile file, voidp buf, uint32_t len) -{ - unz64_s *s = NULL; - uint64_t size_to_read = 0; - uint32_t read_now = 0; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (s->pfile_in_zip_read == NULL) - return UNZ_PARAMERROR; - - size_to_read = s->pfile_in_zip_read->size_local_extrafield - s->pfile_in_zip_read->pos_local_extrafield; - - if (buf == NULL) - return (int)size_to_read; - - if (len > size_to_read) - read_now = (uint32_t)size_to_read; - else - read_now = len; - - if (read_now == 0) - return 0; - - if (ZSEEK64(s->pfile_in_zip_read->z_filefunc, s->pfile_in_zip_read->filestream, - s->pfile_in_zip_read->offset_local_extrafield + s->pfile_in_zip_read->pos_local_extrafield, - ZLIB_FILEFUNC_SEEK_SET) != 0) - return UNZ_ERRNO; - - if (ZREAD64(s->pfile_in_zip_read->z_filefunc, s->pfile_in_zip_read->filestream, buf, read_now) != read_now) - return UNZ_ERRNO; - - return (int)read_now; -} - -extern int ZEXPORT unzCloseCurrentFile(unzFile file) -{ - unz64_s *s = NULL; - file_in_zip64_read_info_s *pfile_in_zip_read_info = NULL; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - pfile_in_zip_read_info = s->pfile_in_zip_read; - if (pfile_in_zip_read_info == NULL) - return UNZ_PARAMERROR; - -#ifdef HAVE_AES - if (s->cur_file_info.compression_method == AES_METHOD) - { - unsigned char authcode[AES_AUTHCODESIZE]; - unsigned char rauthcode[AES_AUTHCODESIZE]; - - if (ZREAD64(s->z_filefunc, s->filestream, authcode, AES_AUTHCODESIZE) != AES_AUTHCODESIZE) - return UNZ_ERRNO; - - if (fcrypt_end(rauthcode, &s->pfile_in_zip_read->aes_ctx) != AES_AUTHCODESIZE) - err = UNZ_CRCERROR; - if (memcmp(authcode, rauthcode, AES_AUTHCODESIZE) != 0) - err = UNZ_CRCERROR; - } - /* AES zip version AE-1 will expect a valid crc as well */ - if ((s->cur_file_info.compression_method != AES_METHOD) || - (s->cur_file_info_internal.aes_version == 0x0001)) -#endif - { - if ((pfile_in_zip_read_info->rest_read_uncompressed == 0) && - (!pfile_in_zip_read_info->raw)) - { - if (pfile_in_zip_read_info->crc32 != pfile_in_zip_read_info->crc32_wait) - err = UNZ_CRCERROR; - } - } - - TRYFREE(pfile_in_zip_read_info->read_buffer); - pfile_in_zip_read_info->read_buffer = NULL; - if (pfile_in_zip_read_info->stream_initialised == Z_DEFLATED) - { -#ifdef HAVE_APPLE_COMPRESSION - if (compression_stream_destroy) - compression_stream_destroy(&pfile_in_zip_read_info->astream); -#else - inflateEnd(&pfile_in_zip_read_info->stream); -#endif - - } -#ifdef HAVE_BZIP2 - else if (pfile_in_zip_read_info->stream_initialised == Z_BZIP2ED) - BZ2_bzDecompressEnd(&pfile_in_zip_read_info->bstream); -#endif - - pfile_in_zip_read_info->stream_initialised = 0; - TRYFREE(pfile_in_zip_read_info); - - s->pfile_in_zip_read = NULL; - - return err; -} - -extern int ZEXPORT unzGoToFirstFile2(unzFile file, unz_file_info64 *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size) -{ - unz64_s *s = NULL; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - s->pos_in_central_dir = s->offset_central_dir; - s->num_file = 0; - - err = unzGetCurrentFileInfoInternal(file, &s->cur_file_info, &s->cur_file_info_internal, - filename, filename_size, extrafield, extrafield_size, comment,comment_size); - - s->current_file_ok = (err == UNZ_OK); - if ((err == 
UNZ_OK) && (pfile_info != NULL)) - memcpy(pfile_info, &s->cur_file_info, sizeof(unz_file_info64)); - - return err; -} - -extern int ZEXPORT unzGoToFirstFile(unzFile file) -{ - return unzGoToFirstFile2(file, NULL, NULL, 0, NULL, 0, NULL, 0); -} - -extern int ZEXPORT unzGoToNextFile2(unzFile file, unz_file_info64 *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size) -{ - unz64_s *s = NULL; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - if (!s->current_file_ok) - return UNZ_END_OF_LIST_OF_FILE; - if (s->gi.number_entry != UINT16_MAX) /* 2^16 files overflow hack */ - { - if (s->num_file+1 == s->gi.number_entry) - return UNZ_END_OF_LIST_OF_FILE; - } - - s->pos_in_central_dir += SIZECENTRALDIRITEM + s->cur_file_info.size_filename + - s->cur_file_info.size_file_extra + s->cur_file_info.size_file_comment; - s->num_file += 1; - - err = unzGetCurrentFileInfoInternal(file, &s->cur_file_info, &s->cur_file_info_internal, - filename, filename_size, extrafield,extrafield_size, comment, comment_size); - - s->current_file_ok = (err == UNZ_OK); - if ((err == UNZ_OK) && (pfile_info != NULL)) - memcpy(pfile_info, &s->cur_file_info, sizeof(unz_file_info64)); - - return err; -} - -extern int ZEXPORT unzGoToNextFile(unzFile file) -{ - return unzGoToNextFile2(file, NULL, NULL, 0, NULL, 0, NULL, 0); -} - -extern int ZEXPORT unzLocateFile(unzFile file, const char *filename, unzFileNameComparer filename_compare_func) -{ - unz64_s *s = NULL; - unz_file_info64 cur_file_info_saved; - unz_file_info64_internal cur_file_info_internal_saved; - uint64_t num_file_saved = 0; - uint64_t pos_in_central_dir_saved = 0; - char current_filename[UNZ_MAXFILENAMEINZIP+1]; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - if (strlen(filename) >= UNZ_MAXFILENAMEINZIP) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (!s->current_file_ok) - return UNZ_END_OF_LIST_OF_FILE; - - /* Save the current state */ - num_file_saved = s->num_file; - pos_in_central_dir_saved = s->pos_in_central_dir; - cur_file_info_saved = s->cur_file_info; - cur_file_info_internal_saved = s->cur_file_info_internal; - - err = unzGoToFirstFile2(file, NULL, current_filename, sizeof(current_filename)-1, NULL, 0, NULL, 0); - - while (err == UNZ_OK) - { - if (filename_compare_func != NULL) - err = filename_compare_func(file, current_filename, filename); - else - err = strcmp(current_filename, filename); - if (err == 0) - return UNZ_OK; - err = unzGoToNextFile2(file, NULL, current_filename, sizeof(current_filename)-1, NULL, 0, NULL, 0); - } - - /* We failed, so restore the state of the 'current file' to where we were. 
*/ - s->num_file = num_file_saved; - s->pos_in_central_dir = pos_in_central_dir_saved; - s->cur_file_info = cur_file_info_saved; - s->cur_file_info_internal = cur_file_info_internal_saved; - return err; -} - -extern int ZEXPORT unzGetFilePos(unzFile file, unz_file_pos *file_pos) -{ - unz64_file_pos file_pos64; - int err = unzGetFilePos64(file, &file_pos64); - if (err == UNZ_OK) - { - file_pos->pos_in_zip_directory = (uint32_t)file_pos64.pos_in_zip_directory; - file_pos->num_of_file = (uint32_t)file_pos64.num_of_file; - } - return err; -} - -extern int ZEXPORT unzGoToFilePos(unzFile file, unz_file_pos *file_pos) -{ - unz64_file_pos file_pos64; - if (file_pos == NULL) - return UNZ_PARAMERROR; - file_pos64.pos_in_zip_directory = file_pos->pos_in_zip_directory; - file_pos64.num_of_file = file_pos->num_of_file; - return unzGoToFilePos64(file, &file_pos64); -} - -extern int ZEXPORT unzGetFilePos64(unzFile file, unz64_file_pos *file_pos) -{ - unz64_s *s = NULL; - - if (file == NULL || file_pos == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (!s->current_file_ok) - return UNZ_END_OF_LIST_OF_FILE; - - file_pos->pos_in_zip_directory = s->pos_in_central_dir; - file_pos->num_of_file = s->num_file; - return UNZ_OK; -} - -extern int ZEXPORT unzGoToFilePos64(unzFile file, const unz64_file_pos *file_pos) -{ - unz64_s *s = NULL; - int err = UNZ_OK; - - if (file == NULL || file_pos == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - /* Jump to the right spot */ - s->pos_in_central_dir = file_pos->pos_in_zip_directory; - s->num_file = file_pos->num_of_file; - - /* Set the current file */ - err = unzGetCurrentFileInfoInternal(file, &s->cur_file_info, &s->cur_file_info_internal, NULL, 0, NULL, 0, NULL, 0); - /* Return results */ - s->current_file_ok = (err == UNZ_OK); - return err; -} - -extern int32_t ZEXPORT unzGetOffset(unzFile file) -{ - uint64_t offset64 = 0; - - if (file == NULL) - return UNZ_PARAMERROR; - offset64 = unzGetOffset64(file); - return (int32_t)offset64; -} - -extern int64_t ZEXPORT unzGetOffset64(unzFile file) -{ - unz64_s *s = NULL; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (!s->current_file_ok) - return 0; - if (s->gi.number_entry != 0 && s->gi.number_entry != UINT16_MAX) - { - if (s->num_file == s->gi.number_entry) - return 0; - } - return s->pos_in_central_dir; -} - -extern int ZEXPORT unzSetOffset(unzFile file, uint32_t pos) -{ - return unzSetOffset64(file, pos); -} - -extern int ZEXPORT unzSetOffset64(unzFile file, uint64_t pos) -{ - unz64_s *s = NULL; - int err = UNZ_OK; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - s->pos_in_central_dir = pos; - s->num_file = s->gi.number_entry; /* hack */ - - err = unzGetCurrentFileInfoInternal(file, &s->cur_file_info, &s->cur_file_info_internal, NULL, 0, NULL, 0, NULL, 0); - - s->current_file_ok = (err == UNZ_OK); - return err; -} - -extern int32_t ZEXPORT unzTell(unzFile file) -{ - unz64_s *s = NULL; - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (s->pfile_in_zip_read == NULL) - return UNZ_PARAMERROR; - return (int32_t)s->pfile_in_zip_read->stream.total_out; -} - -extern int64_t ZEXPORT unzTell64(unzFile file) -{ - unz64_s *s = NULL; - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (s->pfile_in_zip_read == NULL) - return UNZ_PARAMERROR; - return s->pfile_in_zip_read->total_out_64; -} - -extern int ZEXPORT unzSeek(unzFile file, uint32_t offset, int origin) -{ - return unzSeek64(file, offset, origin); -} - -extern int ZEXPORT 
unzSeek64(unzFile file, uint64_t offset, int origin) -{ - unz64_s *s = NULL; - uint64_t stream_pos_begin = 0; - uint64_t stream_pos_end = 0; - uint64_t position = 0; - int is_within_buffer = 0; - - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - - if (s->pfile_in_zip_read == NULL) - return UNZ_ERRNO; - if (s->pfile_in_zip_read->compression_method != 0) - return UNZ_ERRNO; - - if (origin == SEEK_SET) - position = offset; - else if (origin == SEEK_CUR) - position = s->pfile_in_zip_read->total_out_64 + offset; - else if (origin == SEEK_END) - position = s->cur_file_info.compressed_size + offset; - else - return UNZ_PARAMERROR; - - if (position > s->cur_file_info.compressed_size) - return UNZ_PARAMERROR; - - stream_pos_end = s->pfile_in_zip_read->pos_in_zipfile; - stream_pos_begin = stream_pos_end; - - if (stream_pos_begin > UNZ_BUFSIZE) - stream_pos_begin -= UNZ_BUFSIZE; - else - stream_pos_begin = 0; - - is_within_buffer = - (s->pfile_in_zip_read->stream.avail_in != 0) && - (s->pfile_in_zip_read->rest_read_compressed != 0 || s->cur_file_info.compressed_size < UNZ_BUFSIZE) && - (position >= stream_pos_begin && position < stream_pos_end); - - if (is_within_buffer) - { - s->pfile_in_zip_read->stream.next_in += position - s->pfile_in_zip_read->total_out_64; - s->pfile_in_zip_read->stream.avail_in = (uInt)(stream_pos_end - position); - } - else - { - s->pfile_in_zip_read->stream.avail_in = 0; - s->pfile_in_zip_read->stream.next_in = 0; - - s->pfile_in_zip_read->pos_in_zipfile = s->pfile_in_zip_read->offset_local_extrafield + position; - s->pfile_in_zip_read->rest_read_compressed = s->cur_file_info.compressed_size - position; - } - - s->pfile_in_zip_read->rest_read_uncompressed -= (position - s->pfile_in_zip_read->total_out_64); - s->pfile_in_zip_read->stream.total_out = (uint32_t)position; - s->pfile_in_zip_read->total_out_64 = position; - - return UNZ_OK; -} - -extern int ZEXPORT unzEndOfFile(unzFile file) -{ - unz64_s *s = NULL; - if (file == NULL) - return UNZ_PARAMERROR; - s = (unz64_s*)file; - if (s->pfile_in_zip_read == NULL) - return UNZ_PARAMERROR; - if (s->pfile_in_zip_read->rest_read_uncompressed == 0) - return 1; - return 0; -} diff --git a/Externals/minizip/unzip.h b/Externals/minizip/unzip.h index ea4c90a4ddcd..61cbd974e31f 100644 --- a/Externals/minizip/unzip.h +++ b/Externals/minizip/unzip.h @@ -1,306 +1,13 @@ -/* unzip.h -- IO for uncompress .zip files using zlib - Version 1.1, February 14h, 2010 - part of the MiniZip project - - Copyright (C) 1998-2010 Gilles Vollant - http://www.winimage.com/zLibDll/minizip.html - Modifications of Unzip for Zip64 - Copyright (C) 2007-2008 Even Rouault - Modifications for Zip64 support on both zip and unzip - Copyright (C) 2009-2010 Mathias Svensson - http://result42.com +/* unzip.h -- Compatibility layer shim + part of the minizip-ng project This program is distributed under the terms of the same license as zlib. See the accompanying LICENSE file for the full text of the license. 
*/ -#ifndef _UNZ_H -#define _UNZ_H - -#ifdef __cplusplus -extern "C" { -#endif - -#ifndef _ZLIB_H -#include "zlib.h" -#endif - -#ifndef _ZLIBIOAPI_H -#include "ioapi.h" -#endif +#ifndef MZ_COMPAT_UNZIP +#define MZ_COMPAT_UNZIP -#ifdef HAVE_BZIP2 -#include "bzlib.h" -#endif +#include "mz_compat.h" -#define Z_BZIP2ED 12 - -#if defined(STRICTUNZIP) || defined(STRICTZIPUNZIP) -/* like the STRICT of WIN32, we define a pointer that cannot be converted - from (void*) without cast */ -typedef struct TagunzFile__ { int unused; } unz_file__; -typedef unz_file__ *unzFile; -#else -typedef voidp unzFile; #endif - -#define UNZ_OK (0) -#define UNZ_END_OF_LIST_OF_FILE (-100) -#define UNZ_ERRNO (Z_ERRNO) -#define UNZ_EOF (0) -#define UNZ_PARAMERROR (-102) -#define UNZ_BADZIPFILE (-103) -#define UNZ_INTERNALERROR (-104) -#define UNZ_CRCERROR (-105) -#define UNZ_BADPASSWORD (-106) - -/* unz_global_info structure contain global data about the ZIPfile - These data comes from the end of central dir */ -typedef struct unz_global_info64_s -{ - uint64_t number_entry; /* total number of entries in the central dir on this disk */ - uint32_t number_disk_with_CD; /* number the the disk with central dir, used for spanning ZIP*/ - uint16_t size_comment; /* size of the global comment of the zipfile */ -} unz_global_info64; - -typedef struct unz_global_info_s -{ - uint32_t number_entry; /* total number of entries in the central dir on this disk */ - uint32_t number_disk_with_CD; /* number the the disk with central dir, used for spanning ZIP*/ - uint16_t size_comment; /* size of the global comment of the zipfile */ -} unz_global_info; - -/* unz_file_info contain information about a file in the zipfile */ -typedef struct unz_file_info64_s -{ - uint16_t version; /* version made by 2 bytes */ - uint16_t version_needed; /* version needed to extract 2 bytes */ - uint16_t flag; /* general purpose bit flag 2 bytes */ - uint16_t compression_method; /* compression method 2 bytes */ - uint32_t dos_date; /* last mod file date in Dos fmt 4 bytes */ - uint32_t crc; /* crc-32 4 bytes */ - uint64_t compressed_size; /* compressed size 8 bytes */ - uint64_t uncompressed_size; /* uncompressed size 8 bytes */ - uint16_t size_filename; /* filename length 2 bytes */ - uint16_t size_file_extra; /* extra field length 2 bytes */ - uint16_t size_file_comment; /* file comment length 2 bytes */ - - uint32_t disk_num_start; /* disk number start 4 bytes */ - uint16_t internal_fa; /* internal file attributes 2 bytes */ - uint32_t external_fa; /* external file attributes 4 bytes */ - - uint64_t disk_offset; - - uint16_t size_file_extra_internal; -} unz_file_info64; - -typedef struct unz_file_info_s -{ - uint16_t version; /* version made by 2 bytes */ - uint16_t version_needed; /* version needed to extract 2 bytes */ - uint16_t flag; /* general purpose bit flag 2 bytes */ - uint16_t compression_method; /* compression method 2 bytes */ - uint32_t dos_date; /* last mod file date in Dos fmt 4 bytes */ - uint32_t crc; /* crc-32 4 bytes */ - uint32_t compressed_size; /* compressed size 4 bytes */ - uint32_t uncompressed_size; /* uncompressed size 4 bytes */ - uint16_t size_filename; /* filename length 2 bytes */ - uint16_t size_file_extra; /* extra field length 2 bytes */ - uint16_t size_file_comment; /* file comment length 2 bytes */ - - uint16_t disk_num_start; /* disk number start 2 bytes */ - uint16_t internal_fa; /* internal file attributes 2 bytes */ - uint32_t external_fa; /* external file attributes 4 bytes */ - - uint64_t disk_offset; -} 
unz_file_info; - -/***************************************************************************/ -/* Opening and close a zip file */ - -extern unzFile ZEXPORT unzOpen(const char *path); -extern unzFile ZEXPORT unzOpen64(const void *path); -/* Open a Zip file. - - path should contain the full path (by example, on a Windows XP computer - "c:\\zlib\\zlib113.zip" or on an Unix computer "zlib/zlib113.zip". - return NULL if zipfile cannot be opened or doesn't exist - return unzFile handle if no error - - NOTE: The "64" function take a const void *pointer, because the path is just the value passed to the - open64_file_func callback. Under Windows, if UNICODE is defined, using fill_fopen64_filefunc, the path - is a pointer to a wide unicode string (LPCTSTR is LPCWSTR), so const char *does not describe the reality */ - -extern unzFile ZEXPORT unzOpen2(const char *path, zlib_filefunc_def *pzlib_filefunc_def); -/* Open a Zip file, like unzOpen, but provide a set of file low level API for read/write operations */ -extern unzFile ZEXPORT unzOpen2_64(const void *path, zlib_filefunc64_def *pzlib_filefunc_def); -/* Open a Zip file, like unz64Open, but provide a set of file low level API for read/write 64-bit operations */ - -extern int ZEXPORT unzClose(unzFile file); -/* Close a ZipFile opened with unzOpen. If there is files inside the .Zip opened with unzOpenCurrentFile, - these files MUST be closed with unzipCloseCurrentFile before call unzipClose. - - return UNZ_OK if there is no error */ - -extern int ZEXPORT unzGetGlobalInfo(unzFile file, unz_global_info *pglobal_info); -extern int ZEXPORT unzGetGlobalInfo64(unzFile file, unz_global_info64 *pglobal_info); -/* Write info about the ZipFile in the *pglobal_info structure. - - return UNZ_OK if no error */ - -extern int ZEXPORT unzGetGlobalComment(unzFile file, char *comment, uint16_t comment_size); -/* Get the global comment string of the ZipFile, in the comment buffer. - - uSizeBuf is the size of the szComment buffer. - return the number of byte copied or an error code <0 */ - -/***************************************************************************/ -/* Reading the content of the current zipfile, you can open it, read data from it, and close it - (you can close it before reading all the file) */ - -extern int ZEXPORT unzOpenCurrentFile(unzFile file); -/* Open for reading data the current file in the zipfile. - - return UNZ_OK if no error */ - -extern int ZEXPORT unzOpenCurrentFilePassword(unzFile file, const char *password); -/* Open for reading data the current file in the zipfile. - password is a crypting password - - return UNZ_OK if no error */ - -extern int ZEXPORT unzOpenCurrentFile2(unzFile file, int *method, int *level, int raw); -/* Same as unzOpenCurrentFile, but open for read raw the file (not uncompress) - if raw==1 *method will receive method of compression, *level will receive level of compression - - NOTE: you can set level parameter as NULL (if you did not want known level, - but you CANNOT set method parameter as NULL */ - -extern int ZEXPORT unzOpenCurrentFile3(unzFile file, int *method, int *level, int raw, const char *password); -/* Same as unzOpenCurrentFile, but takes extra parameter password for encrypted files */ - -extern int ZEXPORT unzReadCurrentFile(unzFile file, voidp buf, uint32_t len); -/* Read bytes from the current file (opened by unzOpenCurrentFile) - buf contain buffer where data must be copied - len the size of buf. 
- - return the number of byte copied if somes bytes are copied - return 0 if the end of file was reached - return <0 with error code if there is an error (UNZ_ERRNO for IO error, or zLib error for uncompress error) */ - -extern int ZEXPORT unzGetCurrentFileInfo(unzFile file, unz_file_info *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size); -extern int ZEXPORT unzGetCurrentFileInfo64(unzFile file, unz_file_info64 *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size); -/* Get Info about the current file - - pfile_info if != NULL, the *pfile_info structure will contain somes info about the current file - filename if != NULL, the file name string will be copied in filename - filename_size is the size of the filename buffer - extrafield if != NULL, the extra field information from the central header will be copied in to - extrafield_size is the size of the extraField buffer - comment if != NULL, the comment string of the file will be copied in to - comment_size is the size of the comment buffer */ - -extern int ZEXPORT unzGetLocalExtrafield(unzFile file, voidp buf, uint32_t len); -/* Read extra field from the current file (opened by unzOpenCurrentFile) - This is the local-header version of the extra field (sometimes, there is - more info in the local-header version than in the central-header) - - if buf == NULL, it return the size of the local extra field - if buf != NULL, len is the size of the buffer, the extra header is copied in buf. - - return number of bytes copied in buf, or (if <0) the error code */ - -extern int ZEXPORT unzCloseCurrentFile(unzFile file); -/* Close the file in zip opened with unzOpenCurrentFile - - return UNZ_CRCERROR if all the file was read but the CRC is not good */ - -/***************************************************************************/ -/* Browse the directory of the zipfile */ - -typedef int (*unzFileNameComparer)(unzFile file, const char *filename1, const char *filename2); -typedef int (*unzIteratorFunction)(unzFile file); -typedef int (*unzIteratorFunction2)(unzFile file, unz_file_info64 *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size); - -extern int ZEXPORT unzGoToFirstFile(unzFile file); -/* Set the current file of the zipfile to the first file. - - return UNZ_OK if no error */ - -extern int ZEXPORT unzGoToFirstFile2(unzFile file, unz_file_info64 *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size); -/* Set the current file of the zipfile to the first file and retrieves the current info on success. - Not as seek intensive as unzGoToFirstFile + unzGetCurrentFileInfo. - - return UNZ_OK if no error */ - -extern int ZEXPORT unzGoToNextFile(unzFile file); -/* Set the current file of the zipfile to the next file. - - return UNZ_OK if no error - return UNZ_END_OF_LIST_OF_FILE if the actual file was the latest */ - -extern int ZEXPORT unzGoToNextFile2(unzFile file, unz_file_info64 *pfile_info, char *filename, - uint16_t filename_size, void *extrafield, uint16_t extrafield_size, char *comment, uint16_t comment_size); -/* Set the current file of the zipfile to the next file and retrieves the current - info on success. Does less seeking around than unzGotoNextFile + unzGetCurrentFileInfo. 
- - return UNZ_OK if no error - return UNZ_END_OF_LIST_OF_FILE if the actual file was the latest */ - -extern int ZEXPORT unzLocateFile(unzFile file, const char *filename, unzFileNameComparer filename_compare_func); -/* Try locate the file szFileName in the zipfile. For custom filename comparison pass in comparison function. - - return UNZ_OK if the file is found (it becomes the current file) - return UNZ_END_OF_LIST_OF_FILE if the file is not found */ - -/***************************************************************************/ -/* Raw access to zip file */ - -typedef struct unz_file_pos_s -{ - uint32_t pos_in_zip_directory; /* offset in zip file directory */ - uint32_t num_of_file; /* # of file */ -} unz_file_pos; - -extern int ZEXPORT unzGetFilePos(unzFile file, unz_file_pos *file_pos); -extern int ZEXPORT unzGoToFilePos(unzFile file, unz_file_pos *file_pos); - -typedef struct unz64_file_pos_s -{ - uint64_t pos_in_zip_directory; /* offset in zip file directory */ - uint64_t num_of_file; /* # of file */ -} unz64_file_pos; - -extern int ZEXPORT unzGetFilePos64(unzFile file, unz64_file_pos *file_pos); -extern int ZEXPORT unzGoToFilePos64(unzFile file, const unz64_file_pos *file_pos); - -extern int32_t ZEXPORT unzGetOffset(unzFile file); -extern int64_t ZEXPORT unzGetOffset64(unzFile file); -/* Get the current file offset */ - -extern int ZEXPORT unzSetOffset(unzFile file, uint32_t pos); -extern int ZEXPORT unzSetOffset64(unzFile file, uint64_t pos); -/* Set the current file offset */ - -extern int32_t ZEXPORT unzTell(unzFile file); -extern int64_t ZEXPORT unzTell64(unzFile file); -/* return current position in uncompressed data */ - -extern int ZEXPORT unzSeek(unzFile file, uint32_t offset, int origin); -extern int ZEXPORT unzSeek64(unzFile file, uint64_t offset, int origin); -/* Seek within the uncompressed data if compression method is storage */ - -extern int ZEXPORT unzEndOfFile(unzFile file); -/* return 1 if the end of file was reached, 0 elsewhere */ - -/***************************************************************************/ - -#ifdef __cplusplus -} -#endif - -#endif /* _UNZ_H */ diff --git a/Externals/minizip/zip.h b/Externals/minizip/zip.h new file mode 100644 index 000000000000..cf38ac91a04f --- /dev/null +++ b/Externals/minizip/zip.h @@ -0,0 +1,13 @@ +/* zip.h -- Compatibility layer shim + part of the minizip-ng project + + This program is distributed under the terms of the same license as zlib. + See the accompanying LICENSE file for the full text of the license. 
+*/ + +#ifndef MZ_COMPAT_ZIP +#define MZ_COMPAT_ZIP + +#include "mz_compat.h" + +#endif From 14be17effe5afc2e34bd10c4dae544fd931bb29c Mon Sep 17 00:00:00 2001 From: OatmealDome Date: Mon, 7 Feb 2022 01:31:47 -0500 Subject: [PATCH 016/237] MoltenVK: Update to v1.1.7 (Vulkan SDK 1.3.204) --- Externals/MoltenVK/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals/MoltenVK/CMakeLists.txt b/Externals/MoltenVK/CMakeLists.txt index 6accf5904b3f..9c198ab82502 100644 --- a/Externals/MoltenVK/CMakeLists.txt +++ b/Externals/MoltenVK/CMakeLists.txt @@ -1,6 +1,6 @@ include(ExternalProject) -set(MOLTENVK_VERSION "v1.1.6") +set(MOLTENVK_VERSION "v1.1.7") ExternalProject_Add(MoltenVK GIT_REPOSITORY https://github.com/KhronosGroup/MoltenVK.git From bdc55f9557477ecbaabf3a98f2560e461bc44ba9 Mon Sep 17 00:00:00 2001 From: OatmealDome Date: Mon, 7 Feb 2022 15:46:20 -0500 Subject: [PATCH 017/237] RenderState: Set source and destination alpha factors in logic op workaround --- Source/Core/VideoCommon/RenderState.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Source/Core/VideoCommon/RenderState.cpp b/Source/Core/VideoCommon/RenderState.cpp index 15559ae67fa6..3b3554b4fe74 100644 --- a/Source/Core/VideoCommon/RenderState.cpp +++ b/Source/Core/VideoCommon/RenderState.cpp @@ -196,7 +196,9 @@ void BlendingState::ApproximateLogicOpWithBlending() blendenable = true; subtract = approximations[u32(logicmode.Value())].subtract; srcfactor = approximations[u32(logicmode.Value())].srcfactor; + srcfactoralpha = approximations[u32(logicmode.Value())].srcfactor; dstfactor = approximations[u32(logicmode.Value())].dstfactor; + dstfactoralpha = approximations[u32(logicmode.Value())].dstfactor; } void SamplerState::Generate(const BPMemory& bp, u32 index) From a9b5188047abb2a5980d9fca148dba24a8e0bf5b Mon Sep 17 00:00:00 2001 From: OatmealDome Date: Tue, 8 Feb 2022 14:53:51 -0500 Subject: [PATCH 018/237] OGLMain: Set default value for bSupportsSettingObjectNames --- Source/Core/VideoBackends/OGL/OGLMain.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/Source/Core/VideoBackends/OGL/OGLMain.cpp b/Source/Core/VideoBackends/OGL/OGLMain.cpp index 4f4dc21d25b4..3e16b86ae869 100644 --- a/Source/Core/VideoBackends/OGL/OGLMain.cpp +++ b/Source/Core/VideoBackends/OGL/OGLMain.cpp @@ -111,6 +111,7 @@ void VideoBackend::InitBackendInfo() g_Config.backend_info.bSupportsBPTCTextures = false; g_Config.backend_info.bSupportsCoarseDerivatives = false; g_Config.backend_info.bSupportsTextureQueryLevels = false; + g_Config.backend_info.bSupportsSettingObjectNames = false; g_Config.backend_info.Adapters.clear(); From 0327e6acb4a4f883003fb00b04a8092a1914090a Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 24 Jan 2022 22:05:31 -0800 Subject: [PATCH 019/237] Use the same logic for lerp bias for color and alpha It doesn't make sense for alpha to add the bias ONLY when dividing by 2, while color doesn't apply the bias for divide by 2 only; hardware testing indicates that alpha should have the bias. This fixes the menus in Mario Kart Wii (https://bugs.dolphin-emu.org/issues/11909) but reintroduces the white rectangle in Fortune Street. This reverts commit 5aaa5141ed76acc3fffc70561a6b6e7fe3b9b470 (and several other matching changes elsewhere). 
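For reference, the shared lerp path now behaves roughly like the sketch
below. This is an illustration only, not the emitter code: the names are
made up, and the d input, clamping and register packing are left out. The
point it shows is that the rounding bias is now skipped only for the
divide-by-2 scale, for the color and alpha combiners alike.

  #include <cstdint>

  // 0.8 fixed-point lerp between a and b by c, as used by both combiners.
  int32_t TevLerpSketch(int32_t a, int32_t b, int32_t c, bool subtract,
                        int scale_lshift, bool divide_by_2)
  {
    // Widen c from 0..255 to 0..256 so the result can be divided by 256.
    c += c >> 7;

    int32_t lerp = a * (256 - c) + b * c;  // == (a << 8) + (b - a) * c
    lerp <<= scale_lshift;                 // 0/1/2 for 1x/2x/4x; 0 for divide-by-2

    // Rounding bias, now chosen the same way for color and alpha:
    // skipped only when the scale is divide-by-2.
    lerp += divide_by_2 ? 0 : (subtract ? 127 : 128);

    int32_t result = subtract ? (-lerp >> 8) : (lerp >> 8);
    // d is then added and, for the divide-by-2 scale, the sum is shifted
    // right by one; that part is unchanged and omitted from this sketch.
    return result;
  }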
--- Source/Core/VideoBackends/Software/Tev.cpp | 2 +- Source/Core/VideoCommon/PixelShaderGen.cpp | 12 ++++---- Source/Core/VideoCommon/UberShaderPixel.cpp | 32 +++++++++++---------- 3 files changed, 25 insertions(+), 21 deletions(-) diff --git a/Source/Core/VideoBackends/Software/Tev.cpp b/Source/Core/VideoBackends/Software/Tev.cpp index 28c6c5f29d4e..ee0a789ec612 100644 --- a/Source/Core/VideoBackends/Software/Tev.cpp +++ b/Source/Core/VideoBackends/Software/Tev.cpp @@ -297,7 +297,7 @@ void Tev::DrawAlphaRegular(const TevStageCombiner::AlphaCombiner& ac, const Inpu s32 temp = InputReg.a * (256 - c) + (InputReg.b * c); temp <<= m_ScaleLShiftLUT[u32(ac.scale.Value())]; - temp += (ac.scale != TevScale::Divide2) ? 0 : (ac.op == TevOp::Sub) ? 127 : 128; + temp += (ac.scale == TevScale::Divide2) ? 0 : (ac.op == TevOp::Sub) ? 127 : 128; temp = ac.op == TevOp::Sub ? (-temp >> 8) : (temp >> 8); s32 result = diff --git a/Source/Core/VideoCommon/PixelShaderGen.cpp b/Source/Core/VideoCommon/PixelShaderGen.cpp index 673c97ec9b2e..af21e6415636 100644 --- a/Source/Core/VideoCommon/PixelShaderGen.cpp +++ b/Source/Core/VideoCommon/PixelShaderGen.cpp @@ -852,7 +852,7 @@ uint WrapCoord(int coord, uint wrap, int size) {{ static void WriteStage(ShaderCode& out, const pixel_shader_uid_data* uid_data, int n, APIType api_type, bool stereo); static void WriteTevRegular(ShaderCode& out, std::string_view components, TevBias bias, TevOp op, - bool clamp, TevScale scale, bool alpha); + bool clamp, TevScale scale); static void WriteAlphaTest(ShaderCode& out, const pixel_shader_uid_data* uid_data, APIType api_type, bool per_pixel_depth, bool use_dual_source); static void WriteFog(ShaderCode& out, const pixel_shader_uid_data* uid_data); @@ -1677,7 +1677,7 @@ static void WriteStage(ShaderCode& out, const pixel_shader_uid_data* uid_data, i out.Write("\t{} = clamp(", tev_c_output_table[cc.dest]); if (cc.bias != TevBias::Compare) { - WriteTevRegular(out, "rgb", cc.bias, cc.op, cc.clamp, cc.scale, false); + WriteTevRegular(out, "rgb", cc.bias, cc.op, cc.clamp, cc.scale); } else { @@ -1710,7 +1710,7 @@ static void WriteStage(ShaderCode& out, const pixel_shader_uid_data* uid_data, i out.Write("\t{} = clamp(", tev_a_output_table[ac.dest]); if (ac.bias != TevBias::Compare) { - WriteTevRegular(out, "a", ac.bias, ac.op, ac.clamp, ac.scale, true); + WriteTevRegular(out, "a", ac.bias, ac.op, ac.clamp, ac.scale); } else { @@ -1742,7 +1742,7 @@ static void WriteStage(ShaderCode& out, const pixel_shader_uid_data* uid_data, i } static void WriteTevRegular(ShaderCode& out, std::string_view components, TevBias bias, TevOp op, - bool clamp, TevScale scale, bool alpha) + bool clamp, TevScale scale) { static constexpr Common::EnumMap tev_scale_table_left{ "", // Scale1 @@ -1780,12 +1780,14 @@ static void WriteTevRegular(ShaderCode& out, std::string_view components, TevBia // - c is scaled from 0..255 to 0..256, which allows dividing the result by 256 instead of 255 // - if scale is bigger than one, it is moved inside the lerp calculation for increased accuracy // - a rounding bias is added before dividing by 256 + // TODO: Is the rounding bias still added when the scale is divide by 2? Currently we do not + // apply it. out.Write("(((tevin_d.{}{}){})", components, tev_bias_table[bias], tev_scale_table_left[scale]); out.Write(" {} ", tev_op_table[op]); out.Write("(((((tevin_a.{0}<<8) + " "(tevin_b.{0}-tevin_a.{0})*(tevin_c.{0}+(tevin_c.{0}>>7))){1}){2})>>8)", components, tev_scale_table_left[scale], - ((scale == TevScale::Divide2) == alpha) ? 
tev_lerp_bias[op] : ""); + (scale != TevScale::Divide2) ? tev_lerp_bias[op] : ""); out.Write("){}", tev_scale_table_right[scale]); } diff --git a/Source/Core/VideoCommon/UberShaderPixel.cpp b/Source/Core/VideoCommon/UberShaderPixel.cpp index ebf4afa55f30..0ce13725d6ec 100644 --- a/Source/Core/VideoCommon/UberShaderPixel.cpp +++ b/Source/Core/VideoCommon/UberShaderPixel.cpp @@ -334,8 +334,8 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, const auto WriteTevLerp = [&out](std::string_view components) { out.Write( "// TEV's Linear Interpolate, plus bias, add/subtract and scale\n" - "int{0} tevLerp{0}(int{0} A, int{0} B, int{0} C, int{0} D, uint bias, bool op, bool alpha, " - "uint shift) {{\n" + "int{0} tevLerp{0}(int{0} A, int{0} B, int{0} C, int{0} D, uint bias, bool op, " + "uint scale) {{\n" " // Scale C from 0..255 to 0..256\n" " C += C >> 7;\n" "\n" @@ -344,12 +344,14 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, " else if (bias == 2u) D -= 128;\n" "\n" " int{0} lerp = (A << 8) + (B - A)*C;\n" - " if (shift != 3u) {{\n" - " lerp = lerp << shift;\n" - " D = D << shift;\n" + " if (scale != 3u) {{\n" + " lerp = lerp << scale;\n" + " D = D << scale;\n" " }}\n" "\n" - " if ((shift == 3u) == alpha)\n" + " // TODO: Is this rounding bias still added when the scale is divide by 2? Currently we " + "do not apply it.\n" + " if (scale != 3u)\n" " lerp = lerp + (op ? 127 : 128);\n" "\n" " int{0} result = lerp >> 8;\n" @@ -360,9 +362,9 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, " else // Add\n" " result = D + result;\n" "\n" - " // Most of the Shift was moved inside the lerp for improved precision\n" + " // Most of the Scale was moved inside the lerp for improved precision\n" " // But we still do the divide by 2 here\n" - " if (shift == 3u)\n" + " if (scale == 3u)\n" " result = result >> 1;\n" " return result;\n" "}}\n\n", @@ -810,13 +812,13 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, BitfieldExtract<&TevStageCombiner::ColorCombiner::op>("ss.cc")); out.Write(" bool color_clamp = bool({});\n", BitfieldExtract<&TevStageCombiner::ColorCombiner::clamp>("ss.cc")); - out.Write(" uint color_shift = {};\n", + out.Write(" uint color_scale = {};\n", BitfieldExtract<&TevStageCombiner::ColorCombiner::scale>("ss.cc")); out.Write(" uint color_dest = {};\n", BitfieldExtract<&TevStageCombiner::ColorCombiner::dest>("ss.cc")); out.Write( - " uint color_compare_op = color_shift << 1 | uint(color_op);\n" + " uint color_compare_op = color_scale << 1 | uint(color_op);\n" "\n" " int3 color_A = selectColorInput(s, ss, {0}colors_0, {0}colors_1, color_a) & " "int3(255, 255, 255);\n" @@ -831,8 +833,8 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, out.Write( " int3 color;\n" " if (color_bias != 3u) {{ // Normal mode\n" - " color = tevLerp3(color_A, color_B, color_C, color_D, color_bias, color_op, false, " - "color_shift);\n" + " color = tevLerp3(color_A, color_B, color_C, color_D, color_bias, color_op, " + "color_scale);\n" " }} else {{ // Compare mode\n" " // op 6 and 7 do a select per color channel\n" " if (color_compare_op == 6u) {{\n" @@ -880,13 +882,13 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, BitfieldExtract<&TevStageCombiner::AlphaCombiner::op>("ss.ac")); out.Write(" bool alpha_clamp = bool({});\n", BitfieldExtract<&TevStageCombiner::AlphaCombiner::clamp>("ss.ac")); - out.Write(" uint 
alpha_shift = {};\n", + out.Write(" uint alpha_scale = {};\n", BitfieldExtract<&TevStageCombiner::AlphaCombiner::scale>("ss.ac")); out.Write(" uint alpha_dest = {};\n", BitfieldExtract<&TevStageCombiner::AlphaCombiner::dest>("ss.ac")); out.Write( - " uint alpha_compare_op = alpha_shift << 1 | uint(alpha_op);\n" + " uint alpha_compare_op = alpha_scale << 1 | uint(alpha_op);\n" "\n" " int alpha_A;\n" " int alpha_B;\n" @@ -904,7 +906,7 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, " int alpha;\n" " if (alpha_bias != 3u) {{ // Normal mode\n" " alpha = tevLerp(alpha_A, alpha_B, alpha_C, alpha_D, alpha_bias, alpha_op, " - "true, alpha_shift);\n" + "alpha_scale);\n" " }} else {{ // Compare mode\n" " if (alpha_compare_op == 6u) {{\n" " // TevCompareMode::A8, TevComparison::GT\n" From 444f6fd0cb84a673329cf33eacf8907bf96b5951 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 24 Jan 2022 22:48:43 -0800 Subject: [PATCH 020/237] Treat alpha as 0 if alpha is 1 for blending This removes the white box in fortune street again, without causing Mario Kart Wii to regress. --- Source/Core/VideoBackends/Software/Tev.cpp | 13 +++++++++++++ Source/Core/VideoCommon/PixelShaderGen.cpp | 12 ++++++++++++ Source/Core/VideoCommon/UberShaderPixel.cpp | 3 +++ 3 files changed, 28 insertions(+) diff --git a/Source/Core/VideoBackends/Software/Tev.cpp b/Source/Core/VideoBackends/Software/Tev.cpp index ee0a789ec612..690592040518 100644 --- a/Source/Core/VideoBackends/Software/Tev.cpp +++ b/Source/Core/VideoBackends/Software/Tev.cpp @@ -714,6 +714,19 @@ void Tev::Draw() if (!TevAlphaTest(output[ALP_C])) return; + // Hardware testing indicates that an alpha of 1 can pass an alpha test, + // but doesn't do anything in blending + // This situation is important for Mario Kart Wii's menus (they will render incorrectly if the + // alpha test for the FMV in the background fails, since they depend on depth for drawing a yellow + // border) and Fortune Street's gameplay (where a rectangle with an alpha value of 1 is drawn over + // the center of the screen several times, but those rectangles shouldn't be visible). + // Blending seems to result in no changes to the output with an alpha of 1, even if the input + // color is white. + // TODO: Investigate this further: we might be handling blending incorrectly in general (though + // there might not be any good way of changing blending behavior) + if (output[ALP_C] == 1) + output[ALP_C] = 0; + // z texture if (bpmem.ztex2.op != ZTexOp::Disabled) { diff --git a/Source/Core/VideoCommon/PixelShaderGen.cpp b/Source/Core/VideoCommon/PixelShaderGen.cpp index af21e6415636..ddb03f8e9665 100644 --- a/Source/Core/VideoCommon/PixelShaderGen.cpp +++ b/Source/Core/VideoCommon/PixelShaderGen.cpp @@ -1234,6 +1234,18 @@ ShaderCode GeneratePixelShaderCode(APIType api_type, const ShaderHostConfig& hos use_dual_source || use_shader_blend); } + // This situation is important for Mario Kart Wii's menus (they will render incorrectly if the + // alpha test for the FMV in the background fails, since they depend on depth for drawing a yellow + // border) and Fortune Street's gameplay (where a rectangle with an alpha value of 1 is drawn over + // the center of the screen several times, but those rectangles shouldn't be visible). + // Blending seems to result in no changes to the output with an alpha of 1, even if the input + // color is white. 
+ // TODO: Investigate this further: we might be handling blending incorrectly in general (though + // there might not be any good way of changing blending behavior) + out.Write("\t// Hardware testing indicates that an alpha of 1 can pass an alpha test,\n" + "\t// but doesn't do anything in blending\n" + "\tif (prev.a == 1) prev.a = 0;\n"); + if (uid_data->zfreeze) { out.SetConstantsUsed(C_ZSLOPE, C_ZSLOPE); diff --git a/Source/Core/VideoCommon/UberShaderPixel.cpp b/Source/Core/VideoCommon/UberShaderPixel.cpp index 0ce13725d6ec..fa6ad7bc7590 100644 --- a/Source/Core/VideoCommon/UberShaderPixel.cpp +++ b/Source/Core/VideoCommon/UberShaderPixel.cpp @@ -1032,6 +1032,9 @@ ShaderCode GenPixelShader(APIType api_type, const ShaderHostConfig& host_config, " }}\n" "\n"); + out.Write(" // Hardware testing indicates that an alpha of 1 can pass an alpha test,\n" + " // but doesn't do anything in blending\n" + " if (TevResult.a == 1) TevResult.a = 0;\n"); // ========= // Dithering // ========= From 5dd07f73d4b91ff27a91ce2265a541462a3dfe2e Mon Sep 17 00:00:00 2001 From: JosJuice Date: Wed, 9 Feb 2022 21:42:45 +0100 Subject: [PATCH 021/237] Android: Fix the logic for getting the Riivolution path There is a Load path setting, so the Load part can't just be hardcoded. --- .../features/riivolution/ui/RiivolutionBootActivity.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java index c990f95dd746..7509aaf37ad7 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionBootActivity.java @@ -15,6 +15,7 @@ import org.dolphinemu.dolphinemu.R; import org.dolphinemu.dolphinemu.activities.EmulationActivity; import org.dolphinemu.dolphinemu.features.riivolution.model.RiivolutionPatches; +import org.dolphinemu.dolphinemu.features.settings.model.StringSetting; import org.dolphinemu.dolphinemu.utils.DirectoryInitialization; public class RiivolutionBootActivity extends AppCompatActivity @@ -51,9 +52,12 @@ protected void onCreate(Bundle savedInstanceState) int revision = intent.getIntExtra(ARG_REVISION, -1); int discNumber = intent.getIntExtra(ARG_DISC_NUMBER, -1); + String loadPath = StringSetting.MAIN_LOAD_PATH.getStringGlobal(); + if (loadPath.isEmpty()) + loadPath = DirectoryInitialization.getUserDirectory() + "/Load"; + TextView textSdRoot = findViewById(R.id.text_sd_root); - String riivolutionPath = DirectoryInitialization.getUserDirectory() + "/Load/Riivolution"; - textSdRoot.setText(getString(R.string.riivolution_sd_root, riivolutionPath)); + textSdRoot.setText(getString(R.string.riivolution_sd_root, loadPath + "/Riivolution")); Button buttonStart = findViewById(R.id.button_start); buttonStart.setOnClickListener((v) -> From af36b6c0553d0857acd8784e6b9718e63201e8ae Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Thu, 10 Feb 2022 17:30:23 +0100 Subject: [PATCH 022/237] HotkeyManager: Change defaults for GBA hotkeys so they don't conflict with common keyboard mappings. 
--- Source/Core/Core/HotkeyManager.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Source/Core/Core/HotkeyManager.cpp b/Source/Core/Core/HotkeyManager.cpp index 25b59d3943c6..02c01912562c 100644 --- a/Source/Core/Core/HotkeyManager.cpp +++ b/Source/Core/Core/HotkeyManager.cpp @@ -463,9 +463,9 @@ void HotkeyManager::LoadDefaults(const ControllerInterface& ciface) set_key_expression(HK_UNDO_SAVE_STATE, hotkey_string({"Shift", "F12"})); // GBA - set_key_expression(HK_GBA_LOAD, hotkey_string({"`Shift`", "`O`"})); - set_key_expression(HK_GBA_UNLOAD, hotkey_string({"`Shift`", "`W`"})); - set_key_expression(HK_GBA_RESET, hotkey_string({"`Shift`", "`R`"})); + set_key_expression(HK_GBA_LOAD, hotkey_string({"`Ctrl`", "`Shift`", "`O`"})); + set_key_expression(HK_GBA_UNLOAD, hotkey_string({"`Ctrl`", "`Shift`", "`W`"})); + set_key_expression(HK_GBA_RESET, hotkey_string({"`Ctrl`", "`Shift`", "`R`"})); #ifdef _WIN32 set_key_expression(HK_GBA_VOLUME_DOWN, "`SUBTRACT`"); From 989bdb8d6dc83625ea999de5b2f5f21b0a1226b1 Mon Sep 17 00:00:00 2001 From: JosJuice Date: Sat, 12 Feb 2022 17:51:32 +0100 Subject: [PATCH 023/237] Common/CodeBlock: Call ResetCodePtr when decreasing region_size Fixes https://bugs.dolphin-emu.org/issues/12827. A description of what was going wrong: JitArm64::Init first calls CodeBlock::AllocCodeSpace, after which CodeBlock and Arm64Emitter consider us to have 96 MB of code space available. JitArm64::Init then calls AddChildCodeSpace, which is supposed to take 64 MiB of that space and give it to m_far_code. CodeBlock's view of how much space there is gets updated from 96 MiB to 32 MiB, but due to the missing call, Arm64Emitter keeps thinking that it has 96 MiB of space available. The last thing JitArm64::Init does is to call ResetFreeMemoryRanges. This function asks Arm64Emitter how much code space is available and stores a range of that size in m_free_ranges_near, meaning that m_free_ranges_near ends up being backed by both nearcode and farcode! This is a ticking time bomb; as soon as we grab memory from m_free_ranges_near which is backed by farcode, we're in trouble. The crash I ran into in my testing was caused by fastmem code being allocated in farcode (our backpatch handler only handles accesses made from nearcode), but you may as well get errors caused by code intended for nearcode overwriting code intended for farcode or vice versa. So why did NBA Live 2005 crash when most games had no problems, and why was the bug bisected to the commit that increased the size of far code from 16 MiB to 64 MiB? Well, as long as we're only using the first 32 MiB of the big 96 MiB range, everything works. What happens with NBA Live 2005 (I have not investigated exactly through what mechanism this happens) is that at some point the range in m_free_ranges_near gets split into two ranges, one which is backed by nearcode and one which is backed by farcode. Dolphin prefers to select the biggest range available (we don't want to pick a tiny 1 KiB range that may not be able to fit the whole block we're about to emit, after all), and after increasing the size of farcode to 64 MiB, farcode is bigger than nearcode. 
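To make the bookkeeping mismatch described above easier to picture, here is a small self-contained C++ sketch. SketchRegion and SketchEmitter are invented stand-ins for this illustration, not Dolphin's actual CodeBlock or Arm64Emitter, and the sizes are arbitrary. The only point is that carving a child region off the end without also refreshing the emitter's cached view leaves a stale free-space figure, the same kind of inconsistency the added ResetCodePtr() call removes.

#include <cstddef>
#include <cstdio>

// A toy emitter that caches its own copy of the writable range, the way a real
// emitter caches code pointers separately from the region bookkeeping.
struct SketchEmitter
{
  std::size_t begin = 0;
  std::size_t end = 0;
  std::size_t SpaceLeft() const { return end - begin; }
};

class SketchRegion
{
public:
  explicit SketchRegion(std::size_t size) : m_region_size(size) { SyncEmitter(); }

  // Carve a child off the end. If sync_emitter is false, the emitter keeps the
  // old, larger range: the stale state this patch avoids by resetting the code pointer.
  void TakeChildFromEnd(std::size_t child_size, bool sync_emitter)
  {
    m_region_size -= child_size;
    if (sync_emitter)
      SyncEmitter();
  }

  void SyncEmitter() { m_emitter = {0, m_region_size}; }
  const SketchEmitter& Emitter() const { return m_emitter; }

private:
  std::size_t m_region_size;
  SketchEmitter m_emitter;
};

int main()
{
  SketchRegion stale(96);  // pretend 96 units of code space
  stale.TakeChildFromEnd(64, /*sync_emitter=*/false);
  // Stale: the emitter still believes it owns 96 units, so a free-range list
  // seeded from it would overlap the child ("farcode") area.
  std::printf("stale emitter thinks it has %zu units\n", stale.Emitter().SpaceLeft());

  SketchRegion synced(96);
  synced.TakeChildFromEnd(64, /*sync_emitter=*/true);
  std::printf("synced emitter reports %zu units\n", synced.Emitter().SpaceLeft());
  return 0;
}

Built on its own, the stale case reports 96 units even though only 32 remain safe to use, while the synced case reports 32.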
--- Source/Core/Common/CodeBlock.h | 1 + 1 file changed, 1 insertion(+) diff --git a/Source/Core/Common/CodeBlock.h b/Source/Core/Common/CodeBlock.h index 9b66a47e9a08..aa35d3b782cc 100644 --- a/Source/Core/Common/CodeBlock.h +++ b/Source/Core/Common/CodeBlock.h @@ -104,6 +104,7 @@ class CodeBlock : public T ASSERT_MSG(DYNA_REC, child_size < GetSpaceLeft(), "Insufficient space for child allocation."); u8* child_region = region + region_size - child_size; region_size -= child_size; + ResetCodePtr(); return child_region; } void AddChildCodeSpace(CodeBlock* child, size_t child_size) From a05dd6b7e6b7ffeedc295d5c54c21eeab31c2427 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 31 Jan 2022 10:02:16 -0800 Subject: [PATCH 024/237] MemoryWidget: Fix improperly behaving radio buttons --- Source/Core/DolphinQt/Debugger/MemoryWidget.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Source/Core/DolphinQt/Debugger/MemoryWidget.cpp b/Source/Core/DolphinQt/Debugger/MemoryWidget.cpp index fe1307ec4160..04320c8e0de1 100644 --- a/Source/Core/DolphinQt/Debugger/MemoryWidget.cpp +++ b/Source/Core/DolphinQt/Debugger/MemoryWidget.cpp @@ -6,6 +6,7 @@ #include #include +#include #include #include #include @@ -103,6 +104,12 @@ void MemoryWidget::CreateWidgets() m_address_splitter->setCollapsible(0, false); m_address_splitter->setStretchFactor(1, 2); + auto* search_type_group = new QButtonGroup(this); + m_find_ascii = new QRadioButton(tr("ASCII")); + m_find_hex = new QRadioButton(tr("Hex string")); + search_type_group->addButton(m_find_ascii); + search_type_group->addButton(m_find_hex); + // Dump m_dump_mram = new QPushButton(tr("Dump &MRAM")); m_dump_exram = new QPushButton(tr("Dump &ExRAM")); @@ -116,8 +123,6 @@ void MemoryWidget::CreateWidgets() m_find_next = new QPushButton(tr("Find &Next")); m_find_previous = new QPushButton(tr("Find &Previous")); - m_find_ascii = new QRadioButton(tr("ASCII")); - m_find_hex = new QRadioButton(tr("Hex string")); m_result_label = new QLabel; search_layout->addWidget(m_find_next); From 99b3ac21e4e51664a67a07c1c6c175a67a352e9d Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 31 Jan 2022 10:12:55 -0800 Subject: [PATCH 025/237] NewBreakpointDialog: Fix improperly behaving radio buttons --- Source/Core/DolphinQt/Debugger/NewBreakpointDialog.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/Source/Core/DolphinQt/Debugger/NewBreakpointDialog.cpp b/Source/Core/DolphinQt/Debugger/NewBreakpointDialog.cpp index 6a1a3eff7b4d..3e8bfae69adc 100644 --- a/Source/Core/DolphinQt/Debugger/NewBreakpointDialog.cpp +++ b/Source/Core/DolphinQt/Debugger/NewBreakpointDialog.cpp @@ -3,6 +3,7 @@ #include "DolphinQt/Debugger/NewBreakpointDialog.h" +#include #include #include #include @@ -31,10 +32,12 @@ NewBreakpointDialog::NewBreakpointDialog(BreakpointWidget* parent) void NewBreakpointDialog::CreateWidgets() { m_buttons = new QDialogButtonBox(QDialogButtonBox::Ok | QDialogButtonBox::Cancel); + auto* type_group = new QButtonGroup(this); // Instruction BP m_instruction_bp = new QRadioButton(tr("Instruction Breakpoint")); m_instruction_bp->setChecked(true); + type_group->addButton(m_instruction_bp); m_instruction_box = new QGroupBox; m_instruction_address = new QLineEdit; @@ -45,11 +48,15 @@ void NewBreakpointDialog::CreateWidgets() // Memory BP m_memory_bp = new QRadioButton(tr("Memory Breakpoint")); + type_group->addButton(m_memory_bp); m_memory_box = new QGroupBox; + auto* memory_type_group = new QButtonGroup(this); m_memory_use_address = new 
QRadioButton(tr("Address")); m_memory_use_address->setChecked(true); + memory_type_group->addButton(m_memory_use_address); // i18n: A range of memory addresses m_memory_use_range = new QRadioButton(tr("Range")); + memory_type_group->addButton(m_memory_use_range); m_memory_address_from = new QLineEdit; m_memory_address_to = new QLineEdit; m_memory_address_from_label = new QLabel; // Set by OnAddressTypeChanged From 0daee4fe9f4cab476f71224a7d2930ac4b78d686 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 11:39:45 -0800 Subject: [PATCH 026/237] CheatSearchFactoryWidget: Set QButtonGroup's parent --- Source/Core/DolphinQt/CheatSearchFactoryWidget.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/Core/DolphinQt/CheatSearchFactoryWidget.cpp b/Source/Core/DolphinQt/CheatSearchFactoryWidget.cpp index 6e566600f8b9..96dccfd2408d 100644 --- a/Source/Core/DolphinQt/CheatSearchFactoryWidget.cpp +++ b/Source/Core/DolphinQt/CheatSearchFactoryWidget.cpp @@ -55,7 +55,7 @@ void CheatSearchFactoryWidget::CreateWidgets() auto* custom_address_space_layout = new QVBoxLayout(); custom_address_space_layout->setMargin(6); - auto* custom_address_space_button_group = new QButtonGroup(); + auto* custom_address_space_button_group = new QButtonGroup(this); m_custom_virtual_address_space = new QRadioButton(tr("Use virtual addresses when possible")); m_custom_virtual_address_space->setChecked(true); m_custom_physical_address_space = new QRadioButton(tr("Use physical addresses")); From f2f9df7541720217b6375b78cc7b8e70e0b958f9 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 31 Jan 2022 10:19:23 -0800 Subject: [PATCH 027/237] Remove unused includes of QButtonGroup --- Source/Core/DolphinQt/Config/ARCodeWidget.cpp | 1 - .../ControllerInterface/DualShockUDPClientAddServerDialog.cpp | 1 - Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.cpp | 1 - Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.h | 1 - 4 files changed, 4 deletions(-) diff --git a/Source/Core/DolphinQt/Config/ARCodeWidget.cpp b/Source/Core/DolphinQt/Config/ARCodeWidget.cpp index b45cfb3772c4..b528bfb50904 100644 --- a/Source/Core/DolphinQt/Config/ARCodeWidget.cpp +++ b/Source/Core/DolphinQt/Config/ARCodeWidget.cpp @@ -5,7 +5,6 @@ #include -#include #include #include #include diff --git a/Source/Core/DolphinQt/Config/ControllerInterface/DualShockUDPClientAddServerDialog.cpp b/Source/Core/DolphinQt/Config/ControllerInterface/DualShockUDPClientAddServerDialog.cpp index 790b6ca6ce14..84185da00d76 100644 --- a/Source/Core/DolphinQt/Config/ControllerInterface/DualShockUDPClientAddServerDialog.cpp +++ b/Source/Core/DolphinQt/Config/ControllerInterface/DualShockUDPClientAddServerDialog.cpp @@ -5,7 +5,6 @@ #include -#include #include #include #include diff --git a/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.cpp b/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.cpp index d2272d03b0de..1024816e227a 100644 --- a/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.cpp +++ b/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.cpp @@ -3,7 +3,6 @@ #include "DolphinQt/Settings/USBDeviceAddToWhitelistDialog.h" -#include #include #include #include diff --git a/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.h b/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.h index 1b0ebcdccc2c..5ae1199f2e0b 100644 --- a/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.h +++ b/Source/Core/DolphinQt/Settings/USBDeviceAddToWhitelistDialog.h @@ 
-7,7 +7,6 @@ class QTimer; class QDialog; -class QButtonGroup; class QHeaderView; class QLabel; class QLineEdit; From ce52ea35ecd0645b40169c9a2fff3a627f84b207 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 16:14:05 -0800 Subject: [PATCH 028/237] Jit: Replace magic 32 with GPFifo::GATHER_PIPE_SIZE --- Source/Core/Core/HW/GPFifo.h | 5 +---- Source/Core/Core/PowerPC/Jit64/Jit.cpp | 3 ++- Source/Core/Core/PowerPC/JitArm64/Jit.cpp | 3 ++- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Source/Core/Core/HW/GPFifo.h b/Source/Core/Core/HW/GPFifo.h index 703f2fa1e4c2..95c100a1ebff 100644 --- a/Source/Core/Core/HW/GPFifo.h +++ b/Source/Core/Core/HW/GPFifo.h @@ -9,10 +9,7 @@ class PointerWrap; namespace GPFifo { -enum -{ - GATHER_PIPE_SIZE = 32 -}; +constexpr u32 GATHER_PIPE_SIZE = 32; // Init void Init(); diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.cpp b/Source/Core/Core/PowerPC/Jit64/Jit.cpp index b54c9ca74a79..903bc826fbf2 100644 --- a/Source/Core/Core/PowerPC/Jit64/Jit.cpp +++ b/Source/Core/Core/PowerPC/Jit64/Jit.cpp @@ -1018,7 +1018,8 @@ bool Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC) bool gatherPipeIntCheck = js.fifoWriteAddresses.find(op.address) != js.fifoWriteAddresses.end(); // Gather pipe writes using an immediate address are explicitly tracked. - if (jo.optimizeGatherPipe && (js.fifoBytesSinceCheck >= 32 || js.mustCheckFifo)) + if (jo.optimizeGatherPipe && + (js.fifoBytesSinceCheck >= GPFifo::GATHER_PIPE_SIZE || js.mustCheckFifo)) { js.fifoBytesSinceCheck = 0; js.mustCheckFifo = false; diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp index bdcf40fdfc34..113c8e0043b0 100644 --- a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp +++ b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp @@ -835,7 +835,8 @@ bool JitArm64::DoJit(u32 em_address, JitBlock* b, u32 nextPC) // Gather pipe writes using a non-immediate address are discovered by profiling. 
bool gatherPipeIntCheck = js.fifoWriteAddresses.find(op.address) != js.fifoWriteAddresses.end(); - if (jo.optimizeGatherPipe && (js.fifoBytesSinceCheck >= 32 || js.mustCheckFifo)) + if (jo.optimizeGatherPipe && + (js.fifoBytesSinceCheck >= GPFifo::GATHER_PIPE_SIZE || js.mustCheckFifo)) { js.fifoBytesSinceCheck = 0; js.mustCheckFifo = false; From 9ffe9d3dc10a87187685f2a6d3dc95db1739332c Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 16:16:43 -0800 Subject: [PATCH 029/237] CommandProcessor: Remove redundant GATHER_PIPE_SIZE constant --- Source/Core/VideoCommon/CommandProcessor.cpp | 4 ++-- Source/Core/VideoCommon/CommandProcessor.h | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 51b67a9ead75..073b35bdafd0 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -382,7 +382,7 @@ void GatherPipeBursted() } else { - fifo.CPWritePointer.fetch_add(GATHER_PIPE_SIZE, std::memory_order_relaxed); + fifo.CPWritePointer.fetch_add(GPFifo::GATHER_PIPE_SIZE, std::memory_order_relaxed); } if (m_CPCtrlReg.GPReadEnable && m_CPCtrlReg.GPLinkEnable) @@ -396,7 +396,7 @@ void GatherPipeBursted() if (fifo.bFF_HiWatermark.load(std::memory_order_relaxed) != 0) CoreTiming::ForceExceptionCheck(0); - fifo.CPReadWriteDistance.fetch_add(GATHER_PIPE_SIZE, std::memory_order_seq_cst); + fifo.CPReadWriteDistance.fetch_add(GPFifo::GATHER_PIPE_SIZE, std::memory_order_seq_cst); Fifo::RunGpu(); diff --git a/Source/Core/VideoCommon/CommandProcessor.h b/Source/Core/VideoCommon/CommandProcessor.h index 2da7f1c84d94..ca2b6fbb287e 100644 --- a/Source/Core/VideoCommon/CommandProcessor.h +++ b/Source/Core/VideoCommon/CommandProcessor.h @@ -97,7 +97,6 @@ enum enum { - GATHER_PIPE_SIZE = 32, INT_CAUSE_CP = 0x800 }; From 55f8aa9921bc74a6e4172af5f9d22a4c40a3f887 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 16:28:24 -0800 Subject: [PATCH 030/237] VideoCommon/Fifo: Use GPFifo::GATHER_PIPE_SIZE instead of magic 32 --- Source/Core/VideoCommon/Fifo.cpp | 44 +++++++++++++++++--------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/Source/Core/VideoCommon/Fifo.cpp b/Source/Core/VideoCommon/Fifo.cpp index 9d558d056d27..ccee18f40ea6 100644 --- a/Source/Core/VideoCommon/Fifo.cpp +++ b/Source/Core/VideoCommon/Fifo.cpp @@ -17,6 +17,7 @@ #include "Core/Config/MainSettings.h" #include "Core/ConfigManager.h" #include "Core/CoreTiming.h" +#include "Core/HW/GPFifo.h" #include "Core/HW/Memmap.h" #include "Core/Host.h" #include "Core/System.h" @@ -249,13 +250,14 @@ void* PopFifoAuxBuffer(size_t size) // Description: RunGpuLoop() sends data through this function. 
static void ReadDataFromFifo(u32 readPtr) { - constexpr size_t len = 32; - if (len > static_cast(s_video_buffer + FIFO_SIZE - s_video_buffer_write_ptr)) + if (GPFifo::GATHER_PIPE_SIZE > + static_cast(s_video_buffer + FIFO_SIZE - s_video_buffer_write_ptr)) { const size_t existing_len = s_video_buffer_write_ptr - s_video_buffer_read_ptr; - if (len > static_cast(FIFO_SIZE - existing_len)) + if (GPFifo::GATHER_PIPE_SIZE > static_cast(FIFO_SIZE - existing_len)) { - PanicAlertFmt("FIFO out of bounds (existing {} + new {} > {})", existing_len, len, FIFO_SIZE); + PanicAlertFmt("FIFO out of bounds (existing {} + new {} > {})", existing_len, + GPFifo::GATHER_PIPE_SIZE, FIFO_SIZE); return; } memmove(s_video_buffer, s_video_buffer_read_ptr, existing_len); @@ -263,16 +265,15 @@ static void ReadDataFromFifo(u32 readPtr) s_video_buffer_read_ptr = s_video_buffer; } // Copy new video instructions to s_video_buffer for future use in rendering the new picture - Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, len); - s_video_buffer_write_ptr += len; + Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, GPFifo::GATHER_PIPE_SIZE); + s_video_buffer_write_ptr += GPFifo::GATHER_PIPE_SIZE; } // The deterministic_gpu_thread version. static void ReadDataFromFifoOnCPU(u32 readPtr) { - constexpr size_t len = 32; u8* write_ptr = s_video_buffer_write_ptr; - if (len > static_cast(s_video_buffer + FIFO_SIZE - write_ptr)) + if (GPFifo::GATHER_PIPE_SIZE > static_cast(s_video_buffer + FIFO_SIZE - write_ptr)) { // We can't wrap around while the GPU is working on the data. // This should be very rare due to the reset in SyncGPU. @@ -290,17 +291,18 @@ static void ReadDataFromFifoOnCPU(u32 readPtr) } write_ptr = s_video_buffer_write_ptr; const size_t existing_len = write_ptr - s_video_buffer_pp_read_ptr; - if (len > static_cast(FIFO_SIZE - existing_len)) + if (GPFifo::GATHER_PIPE_SIZE > static_cast(FIFO_SIZE - existing_len)) { - PanicAlertFmt("FIFO out of bounds (existing {} + new {} > {})", existing_len, len, FIFO_SIZE); + PanicAlertFmt("FIFO out of bounds (existing {} + new {} > {})", existing_len, + GPFifo::GATHER_PIPE_SIZE, FIFO_SIZE); return; } } - Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, len); + Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, GPFifo::GATHER_PIPE_SIZE); s_video_buffer_pp_read_ptr = OpcodeDecoder::RunFifo( - DataReader(s_video_buffer_pp_read_ptr, write_ptr + len), nullptr); + DataReader(s_video_buffer_pp_read_ptr, write_ptr + GPFifo::GATHER_PIPE_SIZE), nullptr); // This would have to be locked if the GPU thread didn't spin. - s_video_buffer_write_ptr = write_ptr + len; + s_video_buffer_write_ptr = write_ptr + GPFifo::GATHER_PIPE_SIZE; } void ResetVideoBuffer() @@ -362,20 +364,22 @@ void RunGpuLoop() if (readPtr == fifo.CPEnd.load(std::memory_order_relaxed)) readPtr = fifo.CPBase.load(std::memory_order_relaxed); else - readPtr += 32; + readPtr += GPFifo::GATHER_PIPE_SIZE; - ASSERT_MSG(COMMANDPROCESSOR, - (s32)fifo.CPReadWriteDistance.load(std::memory_order_relaxed) - 32 >= 0, + const s32 distance = + static_cast(fifo.CPReadWriteDistance.load(std::memory_order_relaxed)) - + GPFifo::GATHER_PIPE_SIZE; + ASSERT_MSG(COMMANDPROCESSOR, distance >= 0, "Negative fifo.CPReadWriteDistance = {} in FIFO Loop !\nThat can produce " "instability in the game. 
Please report it.", - fifo.CPReadWriteDistance.load(std::memory_order_relaxed) - 32); + distance); u8* write_ptr = s_video_buffer_write_ptr; s_video_buffer_read_ptr = OpcodeDecoder::RunFifo( DataReader(s_video_buffer_read_ptr, write_ptr), &cyclesExecuted); fifo.CPReadPointer.store(readPtr, std::memory_order_relaxed); - fifo.CPReadWriteDistance.fetch_sub(32, std::memory_order_seq_cst); + fifo.CPReadWriteDistance.fetch_sub(GPFifo::GATHER_PIPE_SIZE, std::memory_order_seq_cst); if ((write_ptr - s_video_buffer_read_ptr) == 0) { fifo.SafeCPReadPointer.store(fifo.CPReadPointer.load(std::memory_order_relaxed), @@ -498,10 +502,10 @@ static int RunGpuOnCpu(int ticks) } else { - fifo.CPReadPointer.fetch_add(32, std::memory_order_relaxed); + fifo.CPReadPointer.fetch_add(GPFifo::GATHER_PIPE_SIZE, std::memory_order_relaxed); } - fifo.CPReadWriteDistance.fetch_sub(32, std::memory_order_relaxed); + fifo.CPReadWriteDistance.fetch_sub(GPFifo::GATHER_PIPE_SIZE, std::memory_order_relaxed); } CommandProcessor::SetCPStatusFromGPU(); From 1c74867c71ffd68dc0712599081153f22229afa3 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 16:31:04 -0800 Subject: [PATCH 031/237] GPFifo: Make s_gather_pipe size new constant GATHER_PIPE_EXTRA_SIZE --- Source/Core/Core/HW/GPFifo.cpp | 2 +- Source/Core/Core/HW/GPFifo.h | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Source/Core/Core/HW/GPFifo.cpp b/Source/Core/Core/HW/GPFifo.cpp index a2c568bb38e8..ef05946c2b92 100644 --- a/Source/Core/Core/HW/GPFifo.cpp +++ b/Source/Core/Core/HW/GPFifo.cpp @@ -29,7 +29,7 @@ namespace GPFifo // the same function could use both methods. Compile 2 different versions of each such block? // More room for the fastmodes -alignas(32) static u8 s_gather_pipe[GATHER_PIPE_SIZE * 16]; +alignas(GATHER_PIPE_SIZE) static u8 s_gather_pipe[GATHER_PIPE_EXTRA_SIZE]; static size_t GetGatherPipeCount() { diff --git a/Source/Core/Core/HW/GPFifo.h b/Source/Core/Core/HW/GPFifo.h index 95c100a1ebff..2925e82c0533 100644 --- a/Source/Core/Core/HW/GPFifo.h +++ b/Source/Core/Core/HW/GPFifo.h @@ -10,6 +10,7 @@ class PointerWrap; namespace GPFifo { constexpr u32 GATHER_PIPE_SIZE = 32; +constexpr u32 GATHER_PIPE_EXTRA_SIZE = GATHER_PIPE_SIZE * 16; // Init void Init(); From 095803d1e965b89bd3010f8b426eec85b17a1f45 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 17:34:12 -0800 Subject: [PATCH 032/237] CommandProcessor: Move contents of second unknown opcode panic alert to the log This way, the information is always available, and users don't have to deal with a wall of meaningless information. --- Source/Core/VideoCommon/CommandProcessor.cpp | 52 ++++++++------------ 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 073b35bdafd0..81f6118a94d3 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -634,41 +634,29 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) "Further errors will be sent to the Video Backend log and\n" "Dolphin will now likely crash or hang. 
Enjoy.", cmd_byte, fmt::ptr(buffer), preprocess); - - PanicAlertFmt("Illegal command {:02x}\n" - "CPBase: {:#010x}\n" - "CPEnd: {:#010x}\n" - "CPHiWatermark: {:#010x}\n" - "CPLoWatermark: {:#010x}\n" - "CPReadWriteDistance: {:#010x}\n" - "CPWritePointer: {:#010x}\n" - "CPReadPointer: {:#010x}\n" - "CPBreakpoint: {:#010x}\n" - "bFF_GPReadEnable: {}\n" - "bFF_BPEnable: {}\n" - "bFF_BPInt: {}\n" - "bFF_Breakpoint: {}\n" - "bFF_GPLinkEnable: {}\n" - "bFF_HiWatermarkInt: {}\n" - "bFF_LoWatermarkInt: {}\n", - cmd_byte, fifo.CPBase.load(std::memory_order_relaxed), - fifo.CPEnd.load(std::memory_order_relaxed), fifo.CPHiWatermark, - fifo.CPLoWatermark, fifo.CPReadWriteDistance.load(std::memory_order_relaxed), - fifo.CPWritePointer.load(std::memory_order_relaxed), - fifo.CPReadPointer.load(std::memory_order_relaxed), - fifo.CPBreakpoint.load(std::memory_order_relaxed), - fifo.bFF_GPReadEnable.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_BPEnable.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_BPInt.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_Breakpoint.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_GPLinkEnable.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_HiWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false"); } // We always generate this log message, though we only generate the panic alerts once. - ERROR_LOG_FMT(VIDEO, "FIFO: Unknown Opcode ({:#04x} @ {}, preprocessing = {})", cmd_byte, - fmt::ptr(buffer), preprocess ? "yes" : "no"); + ERROR_LOG_FMT(VIDEO, + "FIFO: Unknown Opcode {:#04x} @ {}, preprocessing = {}, CPBase: {:#010x}, CPEnd: " + "{:#010x}, CPHiWatermark: {:#010x}, CPLoWatermark: {:#010x}, CPReadWriteDistance: " + "{:#010x}, CPWritePointer: {:#010x}, CPReadPointer: {:#010x}, CPBreakpoint: " + "{:#010x}, bFF_GPReadEnable: {}, bFF_BPEnable: {}, bFF_BPInt: {}, bFF_Breakpoint: " + "{}, bFF_GPLinkEnable: {}, bFF_HiWatermarkInt: {}, bFF_LoWatermarkInt: {}", + cmd_byte, fmt::ptr(buffer), preprocess ? "yes" : "no", + fifo.CPBase.load(std::memory_order_relaxed), + fifo.CPEnd.load(std::memory_order_relaxed), fifo.CPHiWatermark, fifo.CPLoWatermark, + fifo.CPReadWriteDistance.load(std::memory_order_relaxed), + fifo.CPWritePointer.load(std::memory_order_relaxed), + fifo.CPReadPointer.load(std::memory_order_relaxed), + fifo.CPBreakpoint.load(std::memory_order_relaxed), + fifo.bFF_GPReadEnable.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_BPEnable.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_BPInt.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_Breakpoint.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_GPLinkEnable.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_HiWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? 
"true" : "false"); } } // namespace CommandProcessor From 68cdceb4bef1b6923c3d3039b2535209dd58db8b Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 17:44:07 -0800 Subject: [PATCH 033/237] CommandProcessor: Log PC and LR on unknown opcodes --- Source/Core/VideoCommon/CommandProcessor.cpp | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 81f6118a94d3..451cdcce9a0d 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -17,6 +17,7 @@ #include "Core/HW/GPFifo.h" #include "Core/HW/MMIO.h" #include "Core/HW/ProcessorInterface.h" +#include "Core/PowerPC/PowerPC.h" #include "Core/System.h" #include "VideoCommon/Fifo.h" @@ -637,12 +638,21 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) } // We always generate this log message, though we only generate the panic alerts once. + // + // PC and LR are generally inaccurate in dual-core and are still misleading in single-core + // due to the gather pipe queueing data. Changing GATHER_PIPE_SIZE to 1 and + // GATHER_PIPE_EXTRA_SIZE to 16 * 32 in GPFifo.h, and using the cached interpreter CPU emulation + // engine, can result in more accurate information (though it is still a bit delayed). + // PC and LR are meaningless when using the fifoplayer, and will generally not be helpful if the + // unknown opcode is inside of a display list. Also note that the changes in GPFifo.h are not + // accurate and may introduce timing issues. ERROR_LOG_FMT(VIDEO, "FIFO: Unknown Opcode {:#04x} @ {}, preprocessing = {}, CPBase: {:#010x}, CPEnd: " "{:#010x}, CPHiWatermark: {:#010x}, CPLoWatermark: {:#010x}, CPReadWriteDistance: " "{:#010x}, CPWritePointer: {:#010x}, CPReadPointer: {:#010x}, CPBreakpoint: " "{:#010x}, bFF_GPReadEnable: {}, bFF_BPEnable: {}, bFF_BPInt: {}, bFF_Breakpoint: " - "{}, bFF_GPLinkEnable: {}, bFF_HiWatermarkInt: {}, bFF_LoWatermarkInt: {}", + "{}, bFF_GPLinkEnable: {}, bFF_HiWatermarkInt: {}, bFF_LoWatermarkInt: {}, " + "approximate PC: {:08x}, approximate LR: {:08x}", cmd_byte, fmt::ptr(buffer), preprocess ? "yes" : "no", fifo.CPBase.load(std::memory_order_relaxed), fifo.CPEnd.load(std::memory_order_relaxed), fifo.CPHiWatermark, fifo.CPLoWatermark, @@ -656,7 +666,7 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) fifo.bFF_Breakpoint.load(std::memory_order_relaxed) ? "true" : "false", fifo.bFF_GPLinkEnable.load(std::memory_order_relaxed) ? "true" : "false", fifo.bFF_HiWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false"); + fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? 
"true" : "false", PC, LR); } } // namespace CommandProcessor From 97482a61c66d2a70bf375985ff27ec1780c0f961 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sat, 12 Feb 2022 18:29:59 -0800 Subject: [PATCH 034/237] CommandProcessor: Ignore unknown opcode for 0x3f --- Source/Core/VideoCommon/CommandProcessor.cpp | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 451cdcce9a0d..2203e235a70b 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -616,12 +616,19 @@ void SetCpClearRegister() void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) { - // Datel software uses 0x01 during startup, and Mario Party 5's Wiggler capsule - // accidentally uses 0x01-0x03 due to sending 4 more vertices than intended. - // Hardware testing indicates that 0x01-0x07 do nothing, so to avoid annoying the user with + // Datel software uses 0x01 during startup, and Mario Party 5's Wiggler capsule accidentally uses + // 0x01-0x03 due to sending 4 more vertices than intended (see https://dolp.in/i8104). + // Prince of Persia: Rival Swords sends 0x3f if the home menu is opened during the intro cutscene + // due to a game bug resulting in an incorrect vertex desc that results in the float value 1.0, + // encoded as 0x3f800000, being parsed as an opcode (see https://dolp.in/i9203). + // + // Hardware testing indicates that these opcodes do nothing, so to avoid annoying the user with // spurious popups, we don't create a panic alert in those cases. Other unknown opcodes - // (such as 0x18) seem to result in hangs. - if (!s_is_fifo_error_seen && cmd_byte > 0x07) + // (such as 0x18) seem to result in actual hangs on real hardware, so the alert still is important + // to keep around for unexpected cases. + const bool suppress_panic_alert = (cmd_byte <= 0x7) || (cmd_byte == 0x3f); + + if (!s_is_fifo_error_seen && !suppress_panic_alert) { s_is_fifo_error_seen = true; From 4ea9287a097e26f1a8e6aca7c4416b4cca0dfc7a Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 13 Feb 2022 14:08:16 -0800 Subject: [PATCH 035/237] CommandProcessor: Move unknown opcode log message before the panic alert This way, the extra information is already in the log by the time the panic alert appears, which is slightly more convenient for debugging. --- Source/Core/VideoCommon/CommandProcessor.cpp | 32 ++++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 2203e235a70b..41c773429a9f 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -628,22 +628,6 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) // to keep around for unexpected cases. 
const bool suppress_panic_alert = (cmd_byte <= 0x7) || (cmd_byte == 0x3f); - if (!s_is_fifo_error_seen && !suppress_panic_alert) - { - s_is_fifo_error_seen = true; - - // TODO(Omega): Maybe dump FIFO to file on this error - PanicAlertFmtT("GFX FIFO: Unknown Opcode ({0:#04x} @ {1}, preprocess={2}).\n" - "This means one of the following:\n" - "* The emulated GPU got desynced, disabling dual core can help\n" - "* Command stream corrupted by some spurious memory bug\n" - "* This really is an unknown opcode (unlikely)\n" - "* Some other sort of bug\n\n" - "Further errors will be sent to the Video Backend log and\n" - "Dolphin will now likely crash or hang. Enjoy.", - cmd_byte, fmt::ptr(buffer), preprocess); - } - // We always generate this log message, though we only generate the panic alerts once. // // PC and LR are generally inaccurate in dual-core and are still misleading in single-core @@ -674,6 +658,22 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) fifo.bFF_GPLinkEnable.load(std::memory_order_relaxed) ? "true" : "false", fifo.bFF_HiWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", PC, LR); + + if (!s_is_fifo_error_seen && !suppress_panic_alert) + { + s_is_fifo_error_seen = true; + + // TODO(Omega): Maybe dump FIFO to file on this error + PanicAlertFmtT("GFX FIFO: Unknown Opcode ({0:#04x} @ {1}, preprocess={2}).\n" + "This means one of the following:\n" + "* The emulated GPU got desynced, disabling dual core can help\n" + "* Command stream corrupted by some spurious memory bug\n" + "* This really is an unknown opcode (unlikely)\n" + "* Some other sort of bug\n\n" + "Further errors will be sent to the Video Backend log and\n" + "Dolphin will now likely crash or hang. Enjoy.", + cmd_byte, fmt::ptr(buffer), preprocess); + } } } // namespace CommandProcessor From 07578d8f1dc47513da1b3378fd4386defd3accbd Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 13 Feb 2022 14:14:46 -0800 Subject: [PATCH 036/237] CommandProcessor: Log ignored unknown opcodes at warn level Large amounts of logging can have an impact on performance, so moving the ones that have been determined to not matter to the warn level gives a way to hide those messages without hiding actual errors (and also gives a fast visual way of distinguishing between ignored and non-ignored ones due to the different colors). --- Source/Core/VideoCommon/CommandProcessor.cpp | 46 +++++++++++--------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 41c773429a9f..890d7ebfa4bc 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -628,6 +628,9 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) // to keep around for unexpected cases. const bool suppress_panic_alert = (cmd_byte <= 0x7) || (cmd_byte == 0x3f); + const auto log_level = + suppress_panic_alert ? Common::Log::LogLevel::LWARNING : Common::Log::LogLevel::LERROR; + // We always generate this log message, though we only generate the panic alerts once. // // PC and LR are generally inaccurate in dual-core and are still misleading in single-core @@ -637,27 +640,28 @@ void HandleUnknownOpcode(u8 cmd_byte, const u8* buffer, bool preprocess) // PC and LR are meaningless when using the fifoplayer, and will generally not be helpful if the // unknown opcode is inside of a display list. 
Also note that the changes in GPFifo.h are not // accurate and may introduce timing issues. - ERROR_LOG_FMT(VIDEO, - "FIFO: Unknown Opcode {:#04x} @ {}, preprocessing = {}, CPBase: {:#010x}, CPEnd: " - "{:#010x}, CPHiWatermark: {:#010x}, CPLoWatermark: {:#010x}, CPReadWriteDistance: " - "{:#010x}, CPWritePointer: {:#010x}, CPReadPointer: {:#010x}, CPBreakpoint: " - "{:#010x}, bFF_GPReadEnable: {}, bFF_BPEnable: {}, bFF_BPInt: {}, bFF_Breakpoint: " - "{}, bFF_GPLinkEnable: {}, bFF_HiWatermarkInt: {}, bFF_LoWatermarkInt: {}, " - "approximate PC: {:08x}, approximate LR: {:08x}", - cmd_byte, fmt::ptr(buffer), preprocess ? "yes" : "no", - fifo.CPBase.load(std::memory_order_relaxed), - fifo.CPEnd.load(std::memory_order_relaxed), fifo.CPHiWatermark, fifo.CPLoWatermark, - fifo.CPReadWriteDistance.load(std::memory_order_relaxed), - fifo.CPWritePointer.load(std::memory_order_relaxed), - fifo.CPReadPointer.load(std::memory_order_relaxed), - fifo.CPBreakpoint.load(std::memory_order_relaxed), - fifo.bFF_GPReadEnable.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_BPEnable.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_BPInt.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_Breakpoint.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_GPLinkEnable.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_HiWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", - fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", PC, LR); + GENERIC_LOG_FMT( + Common::Log::LogType::VIDEO, log_level, + "FIFO: Unknown Opcode {:#04x} @ {}, preprocessing = {}, CPBase: {:#010x}, CPEnd: " + "{:#010x}, CPHiWatermark: {:#010x}, CPLoWatermark: {:#010x}, CPReadWriteDistance: " + "{:#010x}, CPWritePointer: {:#010x}, CPReadPointer: {:#010x}, CPBreakpoint: " + "{:#010x}, bFF_GPReadEnable: {}, bFF_BPEnable: {}, bFF_BPInt: {}, bFF_Breakpoint: " + "{}, bFF_GPLinkEnable: {}, bFF_HiWatermarkInt: {}, bFF_LoWatermarkInt: {}, " + "approximate PC: {:08x}, approximate LR: {:08x}", + cmd_byte, fmt::ptr(buffer), preprocess ? "yes" : "no", + fifo.CPBase.load(std::memory_order_relaxed), fifo.CPEnd.load(std::memory_order_relaxed), + fifo.CPHiWatermark, fifo.CPLoWatermark, + fifo.CPReadWriteDistance.load(std::memory_order_relaxed), + fifo.CPWritePointer.load(std::memory_order_relaxed), + fifo.CPReadPointer.load(std::memory_order_relaxed), + fifo.CPBreakpoint.load(std::memory_order_relaxed), + fifo.bFF_GPReadEnable.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_BPEnable.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_BPInt.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_Breakpoint.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_GPLinkEnable.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_HiWatermarkInt.load(std::memory_order_relaxed) ? "true" : "false", + fifo.bFF_LoWatermarkInt.load(std::memory_order_relaxed) ? 
"true" : "false", PC, LR); if (!s_is_fifo_error_seen && !suppress_panic_alert) { From 3b5faf90f30c8b470527e7cf498cbabf161559e9 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 16 Jan 2022 21:19:34 -0800 Subject: [PATCH 037/237] =?UTF-8?q?FifoAnalyzer:=20Fix=20"enumeration=20va?= =?UTF-8?q?lue=20=E2=80=98NotPresent=E2=80=99=20not=20handled=20in=20switc?= =?UTF-8?q?h"=20warning?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Source/Core/DolphinQt/FIFO/FIFOAnalyzer.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Source/Core/DolphinQt/FIFO/FIFOAnalyzer.cpp b/Source/Core/DolphinQt/FIFO/FIFOAnalyzer.cpp index be9daf57d324..4227e2af3879 100644 --- a/Source/Core/DolphinQt/FIFO/FIFOAnalyzer.cpp +++ b/Source/Core/DolphinQt/FIFO/FIFOAnalyzer.cpp @@ -704,6 +704,8 @@ class DescriptionCallback : public OpcodeDecoder::Callback case VertexComponentFormat::Direct: process_simple_component(component_sizes[vtx_attr.GetColorFormat(c)]); break; + case VertexComponentFormat::NotPresent: + break; } } for (u32 t = 0; t < vtx_desc.high.TexCoord.Size(); t++) From a720596771c448744ee3c069524630a3e343de1a Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 16 Jan 2022 21:20:56 -0800 Subject: [PATCH 038/237] GDB Stub: Fix typo mixing ppcState.spr and ppcState.sr This resulted in an out-of-bounds array access, since sr is only 16 entries long and SPR_IBAT0U evaluates to 528. --- Source/Core/Core/PowerPC/GDBStub.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/Core/Core/PowerPC/GDBStub.cpp b/Source/Core/Core/PowerPC/GDBStub.cpp index 91bc4a465d5b..f09713594090 100644 --- a/Source/Core/Core/PowerPC/GDBStub.cpp +++ b/Source/Core/Core/PowerPC/GDBStub.cpp @@ -633,7 +633,7 @@ static void WriteRegister() } else if (id >= 88 && id < 104) { - PowerPC::ppcState.sr[SPR_IBAT0U + id - 88] = re32hex(bufptr); + PowerPC::ppcState.spr[SPR_IBAT0U + id - 88] = re32hex(bufptr); } else { From 0e23dfbb256783a41a0f400251d64d5f4c2c3474 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 16 Jan 2022 21:28:44 -0800 Subject: [PATCH 039/237] FifoDataFile: Stop ignoring size The size variable started to be unused when I created std::array variants of ReadArray, but we should follow it in case any files have fewer registers stored than they should (otherwise the remaining registers would end up with garbage data from later in the fifolog). Though, there probably aren't many fifologs where this is relevant. 
--- Source/Core/Core/FifoPlayer/FifoDataFile.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Source/Core/Core/FifoPlayer/FifoDataFile.cpp b/Source/Core/Core/FifoPlayer/FifoDataFile.cpp index 7bfdff7833f3..119e19d861cd 100644 --- a/Source/Core/Core/FifoPlayer/FifoDataFile.cpp +++ b/Source/Core/Core/FifoPlayer/FifoDataFile.cpp @@ -285,19 +285,19 @@ std::unique_ptr FifoDataFile::Load(const std::string& filename, bo u32 size = std::min(BP_MEM_SIZE, header.bpMemSize); file.Seek(header.bpMemOffset, File::SeekOrigin::Begin); - file.ReadArray(&dataFile->m_BPMem); + file.ReadArray(dataFile->m_BPMem.data(), size); size = std::min(CP_MEM_SIZE, header.cpMemSize); file.Seek(header.cpMemOffset, File::SeekOrigin::Begin); - file.ReadArray(&dataFile->m_CPMem); + file.ReadArray(dataFile->m_CPMem.data(), size); size = std::min(XF_MEM_SIZE, header.xfMemSize); file.Seek(header.xfMemOffset, File::SeekOrigin::Begin); - file.ReadArray(&dataFile->m_XFMem); + file.ReadArray(dataFile->m_XFMem.data(), size); size = std::min(XF_REGS_SIZE, header.xfRegsSize); file.Seek(header.xfRegsOffset, File::SeekOrigin::Begin); - file.ReadArray(&dataFile->m_XFRegs); + file.ReadArray(dataFile->m_XFRegs.data(), size); // Texture memory saving was added in version 4. dataFile->m_TexMem.fill(0); From a6d516dc9442faa24646bb4d09b5e190a7a5c0e0 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 16 Jan 2022 21:30:53 -0800 Subject: [PATCH 040/237] Fix shadowing variables in labmdas GCC generates warnings about these, although the variable being shadowed is not captured by the lambda. --- Source/Core/Core/NetPlayClient.cpp | 26 ++++++++++--------- .../Core/DolphinQt/RiivolutionBootWidget.cpp | 8 +++--- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/Source/Core/Core/NetPlayClient.cpp b/Source/Core/Core/NetPlayClient.cpp index 54aeb83e8312..6dcbb2e3538c 100644 --- a/Source/Core/Core/NetPlayClient.cpp +++ b/Source/Core/Core/NetPlayClient.cpp @@ -1747,18 +1747,20 @@ bool NetPlayClient::StartGame(const std::string& path) // boot game auto boot_session_data = std::make_unique(); - boot_session_data->SetWiiSyncData( - std::move(m_wii_sync_fs), std::move(m_wii_sync_titles), std::move(m_wii_sync_redirect_folder), - [] { - // on emulation end clean up the Wii save sync directory -- see OnSyncSaveDataWii() - const std::string path = File::GetUserPath(D_USER_IDX) + "Wii" GC_MEMCARD_NETPLAY DIR_SEP; - if (File::Exists(path)) - File::DeleteDirRecursively(path); - const std::string redirect_path = - File::GetUserPath(D_USER_IDX) + "Redirect" GC_MEMCARD_NETPLAY DIR_SEP; - if (File::Exists(redirect_path)) - File::DeleteDirRecursively(redirect_path); - }); + boot_session_data->SetWiiSyncData(std::move(m_wii_sync_fs), std::move(m_wii_sync_titles), + std::move(m_wii_sync_redirect_folder), [] { + // on emulation end clean up the Wii save sync directory -- + // see OnSyncSaveDataWii() + const std::string wii_path = File::GetUserPath(D_USER_IDX) + + "Wii" GC_MEMCARD_NETPLAY DIR_SEP; + if (File::Exists(wii_path)) + File::DeleteDirRecursively(wii_path); + const std::string redirect_path = + File::GetUserPath(D_USER_IDX) + + "Redirect" GC_MEMCARD_NETPLAY DIR_SEP; + if (File::Exists(redirect_path)) + File::DeleteDirRecursively(redirect_path); + }); m_dialog->BootGame(path, std::move(boot_session_data)); UpdateDevices(); diff --git a/Source/Core/DolphinQt/RiivolutionBootWidget.cpp b/Source/Core/DolphinQt/RiivolutionBootWidget.cpp index c70df8e770aa..ff19d8597a6d 100644 --- a/Source/Core/DolphinQt/RiivolutionBootWidget.cpp 
+++ b/Source/Core/DolphinQt/RiivolutionBootWidget.cpp @@ -207,10 +207,10 @@ void RiivolutionBootWidget::MakeGUIForParsedFile(std::string path, std::string r connect(selection, qOverload(&QComboBox::currentIndexChanged), this, [this, selection](int idx) { const auto gui_index = selection->currentData().value(); - auto& disc = m_discs[gui_index.m_disc_index].disc; - auto& section = disc.m_sections[gui_index.m_section_index]; - auto& option = section.m_options[gui_index.m_option_index]; - option.m_selected_choice = static_cast(gui_index.m_choice_index); + auto& selected_disc = m_discs[gui_index.m_disc_index].disc; + auto& selected_section = selected_disc.m_sections[gui_index.m_section_index]; + auto& selected_option = selected_section.m_options[gui_index.m_option_index]; + selected_option.m_selected_choice = static_cast(gui_index.m_choice_index); }); grid_layout->addWidget(label, row, 0, 1, 1); From 15f80f72342b7f8dcf24dd27fdc0d10c92f86914 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 16 Jan 2022 23:32:27 -0800 Subject: [PATCH 041/237] MathUtil: Mark lo in SaturatingCast as [[maybe_unused]] It's unused in the case that T is unsigned and dest is signed (e.g. SaturatingCast which appears SetIsoPaths in MainSettings.cpp) --- Source/Core/Common/MathUtil.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/Core/Common/MathUtil.h b/Source/Core/Common/MathUtil.h index 9b31a9acd542..4f3af04dc50e 100644 --- a/Source/Core/Common/MathUtil.h +++ b/Source/Core/Common/MathUtil.h @@ -37,7 +37,7 @@ constexpr Dest SaturatingCast(T value) { static_assert(std::is_integral()); - constexpr Dest lo = std::numeric_limits::lowest(); + [[maybe_unused]] constexpr Dest lo = std::numeric_limits::lowest(); constexpr Dest hi = std::numeric_limits::max(); // T being a signed integer and Dest unsigned is a problematic case because the value will From 5f9e04be1d73b3c9ec8aa66defcedbc6f7106c46 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Sun, 16 Jan 2022 23:55:23 -0800 Subject: [PATCH 042/237] DSPJit: Suppress offsetof conditionally-supported warnings The DSP JIT only applies on x64, so if it doesn't work on esoteric compilers then that's not a problem. 
(And if it fails to compile, then it'll still produce an error on that platform, just no warnings on other platforms) --- Source/Core/Core/DSP/Jit/x64/DSPEmitter.cpp | 7 +++++++ Source/Core/Core/DSP/Jit/x64/DSPJitMultiplier.cpp | 7 +++++++ Source/Core/Core/DSP/Jit/x64/DSPJitRegCache.cpp | 7 +++++++ 3 files changed, 21 insertions(+) diff --git a/Source/Core/Core/DSP/Jit/x64/DSPEmitter.cpp b/Source/Core/Core/DSP/Jit/x64/DSPEmitter.cpp index bfe7d117ecd4..4960c4c99270 100644 --- a/Source/Core/Core/DSP/Jit/x64/DSPEmitter.cpp +++ b/Source/Core/Core/DSP/Jit/x64/DSPEmitter.cpp @@ -470,6 +470,10 @@ void DSPEmitter::CompileDispatcher() RET(); } +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Winvalid-offsetof" +#endif Gen::OpArg DSPEmitter::M_SDSP_pc() { return MDisp(R15, static_cast(offsetof(SDSP, pc))); @@ -503,5 +507,8 @@ Gen::OpArg DSPEmitter::M_SDSP_reg_stack_ptrs(size_t index) return MDisp(R15, static_cast(offsetof(SDSP, reg_stack_ptrs) + sizeof(SDSP::reg_stack_ptrs[0]) * index)); } +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif } // namespace DSP::JIT::x64 diff --git a/Source/Core/Core/DSP/Jit/x64/DSPJitMultiplier.cpp b/Source/Core/Core/DSP/Jit/x64/DSPJitMultiplier.cpp index d37e7fd4605e..617c92462aaa 100644 --- a/Source/Core/Core/DSP/Jit/x64/DSPJitMultiplier.cpp +++ b/Source/Core/Core/DSP/Jit/x64/DSPJitMultiplier.cpp @@ -149,7 +149,14 @@ void DSPEmitter::multiply_mulx(u8 axh0, u8 axh1) // direct use of prod regs by AX/AXWII (look @that part of ucode). void DSPEmitter::clrp(const UDSPInstruction opc) { +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Winvalid-offsetof" +#endif int offset = static_cast(offsetof(SDSP, r.prod.val)); +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif // 64bit move to memory does not work. 
use 2 32bits MOV(32, MDisp(R15, offset + 0 * sizeof(u32)), Imm32(0xfff00000U)); MOV(32, MDisp(R15, offset + 1 * sizeof(u32)), Imm32(0x001000ffU)); diff --git a/Source/Core/Core/DSP/Jit/x64/DSPJitRegCache.cpp b/Source/Core/Core/DSP/Jit/x64/DSPJitRegCache.cpp index 9e69daafab16..31becddf8de3 100644 --- a/Source/Core/Core/DSP/Jit/x64/DSPJitRegCache.cpp +++ b/Source/Core/Core/DSP/Jit/x64/DSPJitRegCache.cpp @@ -20,6 +20,10 @@ namespace DSP::JIT::x64 constexpr std::array s_allocation_order = { {R8, R9, R10, R11, R12, R13, R14, R15, RSI, RDI, RBX, RCX, RDX, RAX, RBP}}; +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Winvalid-offsetof" +#endif static Gen::OpArg GetRegisterPointer(size_t reg) { switch (reg) @@ -95,6 +99,9 @@ static Gen::OpArg GetRegisterPointer(size_t reg) return M(static_cast(nullptr)); } } +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif #define STATIC_REG_ACCS //#undef STATIC_REG_ACCS From 50d93499269fb3d8a444cea6ae4da6daad2a2c10 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 17 Jan 2022 00:21:58 -0800 Subject: [PATCH 043/237] Fix integer sign difference comparison warnings --- Source/Core/Core/Core.cpp | 2 +- Source/Core/DiscIO/VolumeVerifier.cpp | 7 ++++--- Source/Core/DolphinQt/Debugger/CodeViewWidget.cpp | 6 +++--- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/Source/Core/Core/Core.cpp b/Source/Core/Core/Core.cpp index a945d722761e..08b615f5bf87 100644 --- a/Source/Core/Core/Core.cpp +++ b/Source/Core/Core/Core.cpp @@ -1031,7 +1031,7 @@ int AddOnStateChangedCallback(StateChangedCallbackFunc callback) bool RemoveOnStateChangedCallback(int* handle) { - if (handle && *handle >= 0 && s_on_state_changed_callbacks.size() > *handle) + if (handle && *handle >= 0 && s_on_state_changed_callbacks.size() > static_cast(*handle)) { s_on_state_changed_callbacks[*handle] = StateChangedCallbackFunc(); *handle = -1; diff --git a/Source/Core/DiscIO/VolumeVerifier.cpp b/Source/Core/DiscIO/VolumeVerifier.cpp index 252cf427b66f..593fb49b33d7 100644 --- a/Source/Core/DiscIO/VolumeVerifier.cpp +++ b/Source/Core/DiscIO/VolumeVerifier.cpp @@ -1113,10 +1113,11 @@ void VolumeVerifier::Process() bytes_to_read = Common::AlignUp(content.size, 0x40); content_read = true; - if (m_content_index + 1 < m_content_offsets.size() && - m_content_offsets[m_content_index + 1] < m_progress + bytes_to_read) + const u16 next_content_index = m_content_index + 1; + if (next_content_index < m_content_offsets.size() && + m_content_offsets[next_content_index] < m_progress + bytes_to_read) { - excess_bytes = m_progress + bytes_to_read - m_content_offsets[m_content_index + 1]; + excess_bytes = m_progress + bytes_to_read - m_content_offsets[next_content_index]; } } else if (m_content_index < m_content_offsets.size() && diff --git a/Source/Core/DolphinQt/Debugger/CodeViewWidget.cpp b/Source/Core/DolphinQt/Debugger/CodeViewWidget.cpp index 77bb8c8bd38b..fb300a370feb 100644 --- a/Source/Core/DolphinQt/Debugger/CodeViewWidget.cpp +++ b/Source/Core/DolphinQt/Debugger/CodeViewWidget.cpp @@ -376,7 +376,7 @@ void CodeViewWidget::Update() void CodeViewWidget::CalculateBranchIndentation() { - const size_t rows = rowCount(); + const u32 rows = rowCount(); const size_t columns = m_branches.size(); if (rows < 1 || columns < 1) return; @@ -442,7 +442,7 @@ void CodeViewWidget::CalculateBranchIndentation() }; const u32 first_visible_addr = AddressForRow(0); - const u32 last_visible_addr = AddressForRow(static_cast(rows - 1)); + const u32 last_visible_addr = AddressForRow(rows - 1); if 
(first_visible_addr <= last_visible_addr) { @@ -456,7 +456,7 @@ void CodeViewWidget::CalculateBranchIndentation() // first_visible_addr to fffffffc, and the second for 00000000 to last_visible_addr. // That means we need to find the row corresponding to 00000000. int addr_zero_row = -1; - for (int row = 0; row < rows; row++) + for (u32 row = 0; row < rows; row++) { if (AddressForRow(row) == 0) { From d2ebbfb91a7d50b0db12010392a594c12bbf1c44 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 17 Jan 2022 00:40:46 -0800 Subject: [PATCH 044/237] GDB Stub: Make s_socket_context static --- Source/Core/Core/PowerPC/GDBStub.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/Core/Core/PowerPC/GDBStub.cpp b/Source/Core/Core/PowerPC/GDBStub.cpp index f09713594090..2fe39b75fddd 100644 --- a/Source/Core/Core/PowerPC/GDBStub.cpp +++ b/Source/Core/Core/PowerPC/GDBStub.cpp @@ -40,7 +40,7 @@ typedef SSIZE_T ssize_t; namespace GDBStub { -std::optional s_socket_context; +static std::optional s_socket_context; #define GDB_BFR_MAX 10000 From fd511a689f5acb9cb563f6f267da836e7ad4f37b Mon Sep 17 00:00:00 2001 From: JosJuice Date: Mon, 14 Feb 2022 19:03:34 +0100 Subject: [PATCH 045/237] Jit: Skip redundant flushes This makes codegen faster (by perhaps 10-20% in the case of Jit64, I didn't measure too closely), which helps speed up NBA Live 2005 a little. But the game still has serious performance issues. --- Source/Core/Core/PowerPC/Jit64/Jit.cpp | 4 ++-- Source/Core/Core/PowerPC/JitArm64/Jit.cpp | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.cpp b/Source/Core/Core/PowerPC/Jit64/Jit.cpp index 903bc826fbf2..f43d25855f84 100644 --- a/Source/Core/Core/PowerPC/Jit64/Jit.cpp +++ b/Source/Core/Core/PowerPC/Jit64/Jit.cpp @@ -1184,8 +1184,8 @@ bool Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC) gpr.Discard(op.gprDiscardable); fpr.Discard(op.fprDiscardable); } - gpr.Flush(~op.gprInUse); - fpr.Flush(~op.fprInUse); + gpr.Flush(~op.gprInUse & (op.regsIn | op.regsOut)); + fpr.Flush(~op.fprInUse & (op.fregsIn | op.GetFregsOut())); if (opinfo->flags & FL_LOADSTORE) ++js.numLoadStoreInst; diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp index 113c8e0043b0..3be81d2c713e 100644 --- a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp +++ b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp @@ -971,8 +971,8 @@ bool JitArm64::DoJit(u32 em_address, JitBlock* b, u32 nextPC) gpr.DiscardRegisters(op.gprDiscardable); fpr.DiscardRegisters(op.fprDiscardable); } - gpr.StoreRegisters(~op.gprInUse); - fpr.StoreRegisters(~op.fprInUse); + gpr.StoreRegisters(~op.gprInUse & (op.regsIn | op.regsOut)); + fpr.StoreRegisters(~op.fprInUse & (op.fregsIn | op.GetFregsOut())); if (opinfo->flags & FL_LOADSTORE) ++js.numLoadStoreInst; From 3a4de2b306efd6136791f7d995eb3ddea153d011 Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Mon, 14 Feb 2022 21:49:22 +0100 Subject: [PATCH 046/237] MemArena: Use Common::DynamicLibrary for m_api_ms_win_core_memory_l1_1_6_handle. 
--- Source/Core/Common/MemArena.h | 3 ++- Source/Core/Common/MemArenaWin.cpp | 23 +++++++++++------------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Source/Core/Common/MemArena.h b/Source/Core/Common/MemArena.h index bdde5579c586..5aebeccc23ff 100644 --- a/Source/Core/Common/MemArena.h +++ b/Source/Core/Common/MemArena.h @@ -7,6 +7,7 @@ #include #include "Common/CommonTypes.h" +#include "Common/DynamicLibrary.h" namespace Common { @@ -106,7 +107,7 @@ class MemArena final std::vector m_regions; void* m_reserved_region = nullptr; void* m_memory_handle = nullptr; - void* m_api_ms_win_core_memory_l1_1_6_handle = nullptr; + Common::DynamicLibrary m_api_ms_win_core_memory_l1_1_6_handle; void* m_address_VirtualAlloc2 = nullptr; void* m_address_MapViewOfFile3 = nullptr; #else diff --git a/Source/Core/Common/MemArenaWin.cpp b/Source/Core/Common/MemArenaWin.cpp index 64346274786c..d94eb7c9d81e 100644 --- a/Source/Core/Common/MemArenaWin.cpp +++ b/Source/Core/Common/MemArenaWin.cpp @@ -61,22 +61,23 @@ MemArena::MemArena() if (!static_cast(ptr_IsApiSetImplemented)("api-ms-win-core-memory-l1-1-6")) return; - const HMODULE handle = LoadLibraryW(L"api-ms-win-core-memory-l1-1-6.dll"); - if (!handle) + m_api_ms_win_core_memory_l1_1_6_handle.Open("api-ms-win-core-memory-l1-1-6.dll"); + if (!m_api_ms_win_core_memory_l1_1_6_handle.IsOpen()) return; - void* const address_VirtualAlloc2 = GetProcAddress(handle, "VirtualAlloc2FromApp"); - void* const address_MapViewOfFile3 = GetProcAddress(handle, "MapViewOfFile3FromApp"); + void* const address_VirtualAlloc2 = + m_api_ms_win_core_memory_l1_1_6_handle.GetSymbolAddress("VirtualAlloc2FromApp"); + void* const address_MapViewOfFile3 = + m_api_ms_win_core_memory_l1_1_6_handle.GetSymbolAddress("MapViewOfFile3FromApp"); if (address_VirtualAlloc2 && address_MapViewOfFile3) { - m_api_ms_win_core_memory_l1_1_6_handle = handle; m_address_VirtualAlloc2 = address_VirtualAlloc2; m_address_MapViewOfFile3 = address_MapViewOfFile3; } else { // at least one function is not available, use legacy logic - FreeLibrary(handle); + m_api_ms_win_core_memory_l1_1_6_handle.Close(); } } @@ -84,8 +85,6 @@ MemArena::~MemArena() { ReleaseMemoryRegion(); ReleaseSHMSegment(); - if (m_api_ms_win_core_memory_l1_1_6_handle) - FreeLibrary(static_cast(m_api_ms_win_core_memory_l1_1_6_handle)); } void MemArena::GrabSHMSegment(size_t size) @@ -123,7 +122,7 @@ u8* MemArena::ReserveMemoryRegion(size_t memory_size) } u8* base; - if (m_api_ms_win_core_memory_l1_1_6_handle) + if (m_api_ms_win_core_memory_l1_1_6_handle.IsOpen()) { base = static_cast(static_cast(m_address_VirtualAlloc2)( nullptr, nullptr, memory_size, MEM_RESERVE | MEM_RESERVE_PLACEHOLDER, PAGE_NOACCESS, @@ -154,7 +153,7 @@ u8* MemArena::ReserveMemoryRegion(size_t memory_size) void MemArena::ReleaseMemoryRegion() { - if (m_api_ms_win_core_memory_l1_1_6_handle && m_reserved_region) + if (m_api_ms_win_core_memory_l1_1_6_handle.IsOpen() && m_reserved_region) { // user should have unmapped everything by this point, check if that's true and yell if not // (it indicates a bug in the emulated memory mapping logic) @@ -291,7 +290,7 @@ WindowsMemoryRegion* MemArena::EnsureSplitRegionForMapping(void* start_address, void* MemArena::MapInMemoryRegion(s64 offset, size_t size, void* base) { - if (m_api_ms_win_core_memory_l1_1_6_handle) + if (m_api_ms_win_core_memory_l1_1_6_handle.IsOpen()) { WindowsMemoryRegion* const region = EnsureSplitRegionForMapping(base, size); if (!region) @@ -393,7 +392,7 @@ bool 
MemArena::JoinRegionsAfterUnmap(void* start_address, size_t size) void MemArena::UnmapFromMemoryRegion(void* view, size_t size) { - if (m_api_ms_win_core_memory_l1_1_6_handle) + if (m_api_ms_win_core_memory_l1_1_6_handle.IsOpen()) { if (UnmapViewOfFileEx(view, MEM_PRESERVE_PLACEHOLDER)) { From 4a9553bf6df683b9f2995e5d7c485bf3034502e1 Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Mon, 14 Feb 2022 21:54:21 +0100 Subject: [PATCH 047/237] MemArena: Load UnmapViewOfFileEx dynamically to restore Windows 7 support. --- Source/Core/Common/MemArena.h | 2 ++ Source/Core/Common/MemArenaWin.cpp | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/Source/Core/Common/MemArena.h b/Source/Core/Common/MemArena.h index 5aebeccc23ff..7a0a68357976 100644 --- a/Source/Core/Common/MemArena.h +++ b/Source/Core/Common/MemArena.h @@ -107,7 +107,9 @@ class MemArena final std::vector m_regions; void* m_reserved_region = nullptr; void* m_memory_handle = nullptr; + Common::DynamicLibrary m_kernel32_handle; Common::DynamicLibrary m_api_ms_win_core_memory_l1_1_6_handle; + void* m_address_UnmapViewOfFileEx = nullptr; void* m_address_VirtualAlloc2 = nullptr; void* m_address_MapViewOfFile3 = nullptr; #else diff --git a/Source/Core/Common/MemArenaWin.cpp b/Source/Core/Common/MemArenaWin.cpp index d94eb7c9d81e..1e2de701c5dd 100644 --- a/Source/Core/Common/MemArenaWin.cpp +++ b/Source/Core/Common/MemArenaWin.cpp @@ -29,6 +29,8 @@ using PMapViewOfFile3 = PVOID(WINAPI*)(HANDLE FileMapping, HANDLE Process, PVOID MEM_EXTENDED_PARAMETER* ExtendedParameters, ULONG ParameterCount); +using PUnmapViewOfFileEx = BOOL(WINAPI*)(PVOID BaseAddress, ULONG UnmapFlags); + using PIsApiSetImplemented = BOOL(APIENTRY*)(PCSTR Contract); namespace Common @@ -62,22 +64,30 @@ MemArena::MemArena() return; m_api_ms_win_core_memory_l1_1_6_handle.Open("api-ms-win-core-memory-l1-1-6.dll"); - if (!m_api_ms_win_core_memory_l1_1_6_handle.IsOpen()) + m_kernel32_handle.Open("Kernel32.dll"); + if (!m_api_ms_win_core_memory_l1_1_6_handle.IsOpen() || !m_kernel32_handle.IsOpen()) + { + m_api_ms_win_core_memory_l1_1_6_handle.Close(); + m_kernel32_handle.Close(); return; + } void* const address_VirtualAlloc2 = m_api_ms_win_core_memory_l1_1_6_handle.GetSymbolAddress("VirtualAlloc2FromApp"); void* const address_MapViewOfFile3 = m_api_ms_win_core_memory_l1_1_6_handle.GetSymbolAddress("MapViewOfFile3FromApp"); - if (address_VirtualAlloc2 && address_MapViewOfFile3) + void* const address_UnmapViewOfFileEx = m_kernel32_handle.GetSymbolAddress("UnmapViewOfFileEx"); + if (address_VirtualAlloc2 && address_MapViewOfFile3 && address_UnmapViewOfFileEx) { m_address_VirtualAlloc2 = address_VirtualAlloc2; m_address_MapViewOfFile3 = address_MapViewOfFile3; + m_address_UnmapViewOfFileEx = address_UnmapViewOfFileEx; } else { // at least one function is not available, use legacy logic m_api_ms_win_core_memory_l1_1_6_handle.Close(); + m_kernel32_handle.Close(); } } @@ -394,7 +404,8 @@ void MemArena::UnmapFromMemoryRegion(void* view, size_t size) { if (m_api_ms_win_core_memory_l1_1_6_handle.IsOpen()) { - if (UnmapViewOfFileEx(view, MEM_PRESERVE_PLACEHOLDER)) + if (static_cast(m_address_UnmapViewOfFileEx)(view, + MEM_PRESERVE_PLACEHOLDER)) { if (!JoinRegionsAfterUnmap(view, size)) PanicAlertFmt("Joining memory region failed."); From 20b2300ce1cc5b6bd2d8cf347ae99f77d3f32035 Mon Sep 17 00:00:00 2001 From: JosJuice Date: Mon, 14 Feb 2022 22:09:21 +0100 Subject: [PATCH 048/237] PPCAnalyst: Count outputs as being in use In a code block where a 
guest register is accessed at least twice and the last access is a write and the register is not discardable immediately after the second-to-last instruction (perhaps there is an instruction in between that can cause an exception), currently Dolphin's JITs will flush the register after the second-to-last instruction. It would be better if we replaced the flush after the second-to-last instruction with a flush that only happens if the exception path is taken. This change accomplishes that by marking guest registers as "in use" not just when they are used as inputs but also when they are used as outputs, preventing the loop in DoJit from flushing the register until after the last access. --- Source/Core/Core/PowerPC/PPCAnalyst.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Source/Core/Core/PowerPC/PPCAnalyst.cpp b/Source/Core/Core/PowerPC/PPCAnalyst.cpp index 36821dce1341..be5fb04e92c4 100644 --- a/Source/Core/Core/PowerPC/PPCAnalyst.cpp +++ b/Source/Core/Core/PowerPC/PPCAnalyst.cpp @@ -946,8 +946,8 @@ u32 PPCAnalyzer::Analyze(u32 address, CodeBlock* block, CodeBuffer* buffer, op.gprDiscardable = gprDiscardable; op.fprDiscardable = fprDiscardable; op.fprInXmm = fprInXmm; - gprInUse |= op.regsIn; - fprInUse |= op.fregsIn; + gprInUse |= op.regsIn | op.regsOut; + fprInUse |= op.fregsIn | op.GetFregsOut(); if (op.canEndBlock || op.canCauseException) { gprDiscardable = BitSet32{}; From ffbe6cb21f35ea8502da8c029266b566e2833a60 Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Tue, 22 Jun 2021 00:15:03 +0200 Subject: [PATCH 049/237] MemArena: Keep fastmem region mapped on Linux. --- Source/Core/Common/MemArena.h | 6 +++++ Source/Core/Common/MemArenaUnix.cpp | 37 ++++++++++++++++++++--------- 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/Source/Core/Common/MemArena.h b/Source/Core/Common/MemArena.h index 7a0a68357976..e365b78e0ba0 100644 --- a/Source/Core/Common/MemArena.h +++ b/Source/Core/Common/MemArena.h @@ -113,7 +113,13 @@ class MemArena final void* m_address_VirtualAlloc2 = nullptr; void* m_address_MapViewOfFile3 = nullptr; #else +#ifdef ANDROID int fd; +#else + int m_shm_fd; + void* m_reserved_region; + std::size_t m_reserved_region_size; +#endif #endif }; diff --git a/Source/Core/Common/MemArenaUnix.cpp b/Source/Core/Common/MemArenaUnix.cpp index 1743c2ace018..549ce09cfdd6 100644 --- a/Source/Core/Common/MemArenaUnix.cpp +++ b/Source/Core/Common/MemArenaUnix.cpp @@ -28,30 +28,39 @@ MemArena::~MemArena() = default; void MemArena::GrabSHMSegment(size_t size) { const std::string file_name = "/dolphin-emu." 
+ std::to_string(getpid()); - fd = shm_open(file_name.c_str(), O_RDWR | O_CREAT | O_EXCL, 0600); - if (fd == -1) + m_shm_fd = shm_open(file_name.c_str(), O_RDWR | O_CREAT | O_EXCL, 0600); + if (m_shm_fd == -1) { ERROR_LOG_FMT(MEMMAP, "shm_open failed: {}", strerror(errno)); return; } shm_unlink(file_name.c_str()); - if (ftruncate(fd, size) < 0) + if (ftruncate(m_shm_fd, size) < 0) ERROR_LOG_FMT(MEMMAP, "Failed to allocate low memory space"); } void MemArena::ReleaseSHMSegment() { - close(fd); + close(m_shm_fd); } void* MemArena::CreateView(s64 offset, size_t size) { - return MapInMemoryRegion(offset, size, nullptr); + void* retval = mmap(nullptr, size, PROT_READ | PROT_WRITE, MAP_SHARED, m_shm_fd, offset); + if (retval == MAP_FAILED) + { + NOTICE_LOG_FMT(MEMMAP, "mmap failed"); + return nullptr; + } + else + { + return retval; + } } void MemArena::ReleaseView(void* view, size_t size) { - UnmapFromMemoryRegion(view, size); + munmap(view, size); } u8* MemArena::ReserveMemoryRegion(size_t memory_size) @@ -63,19 +72,23 @@ u8* MemArena::ReserveMemoryRegion(size_t memory_size) PanicAlertFmt("Failed to map enough memory space: {}", LastStrerrorString()); return nullptr; } - munmap(base, memory_size); + m_reserved_region = base; + m_reserved_region_size = memory_size; return static_cast(base); } void MemArena::ReleaseMemoryRegion() { + if (m_reserved_region) + { + munmap(m_reserved_region, m_reserved_region_size); + m_reserved_region = nullptr; + } } void* MemArena::MapInMemoryRegion(s64 offset, size_t size, void* base) { - void* retval = mmap(base, size, PROT_READ | PROT_WRITE, - MAP_SHARED | ((base == nullptr) ? 0 : MAP_FIXED), fd, offset); - + void* retval = mmap(base, size, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_FIXED, m_shm_fd, offset); if (retval == MAP_FAILED) { NOTICE_LOG_FMT(MEMMAP, "mmap failed"); @@ -89,6 +102,8 @@ void* MemArena::MapInMemoryRegion(s64 offset, size_t size, void* base) void MemArena::UnmapFromMemoryRegion(void* view, size_t size) { - munmap(view, size); + void* retval = mmap(view, size, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0); + if (retval == MAP_FAILED) + NOTICE_LOG_FMT(MEMMAP, "mmap failed"); } } // namespace Common From e38918565b4cb4d3a94499a41569529515e44bc6 Mon Sep 17 00:00:00 2001 From: Nora Date: Sun, 30 Jan 2022 19:40:45 -0500 Subject: [PATCH 050/237] GameINI: Update Cheats for GMPE01, GP5E01, GP6E01, GP7E01, RM8E01 --- Data/Sys/GameSettings/GMPE01.ini | 31 +++++++-- Data/Sys/GameSettings/GP5E01.ini | 76 ++++++++++++++------- Data/Sys/GameSettings/GP6E01.ini | 72 +++++++++++++++++--- Data/Sys/GameSettings/GP7E01.ini | 109 ++++++++++++++++++++++++------- Data/Sys/GameSettings/RM8E01.ini | 4 ++ 5 files changed, 232 insertions(+), 60 deletions(-) diff --git a/Data/Sys/GameSettings/GMPE01.ini b/Data/Sys/GameSettings/GMPE01.ini index b48d546033b4..1cd20fde2208 100644 --- a/Data/Sys/GameSettings/GMPE01.ini +++ b/Data/Sys/GameSettings/GMPE01.ini @@ -261,6 +261,9 @@ $Extra - Disable Music 041d3d1c 00000001 *Disables all in-game music. +$Mechanics - Battle Games Always Worth 20 Coins [Nora] +001D5DE0 00040032 + $Mechanics - Double the Amount of Turns [Airsola] C205BEF8 00000004 2C000032 4182000C @@ -295,12 +298,6 @@ e2000001 80008000 e2000001 80008000 *Access all board events regardless of Mega / Mini Mushrooms. -$Mechanics - Free Lottery Game [Celerizer] -0407b28c 38600005 -0407bd04 38800000 -*Makes the lottery cost nothing instead of the usual 5 coins. -*You need to have 5 coins but you do not lose them. 
- $Mechanics - Free Stars [gamemasterplc] 040843cc 2c030000 04084590 2c030000 @@ -347,6 +344,28 @@ c208a07c 0000001f *Item Deletion is now a thing. *Press B to view your items, then delete the item with R. +$Mechanics - Lose 0 Coins On Red Spaces [Nora] +0407FD74 60000000 +0407FD78 3BC00000 + +$Mechanics - Lose 20 Coins On Red Spaces [Nora] +0407FD74 60000000 +0407FD78 3BC00014 + +$Mechanics - Lottery 3rd Place Prize is Gaddlight or Warp Pipe [Nora] +021D5678 00000805 + +$Mechanics - Lottery 3rd Place Prize is Magic Lamp or Boo's Crystal Ball [Nora] +021D5678 00000BOC + +$Mechanics - Lottery Costs 0 Coins [Nora] +0407BD20 2C1E0000 +*Makes the lottery cost nothing instead of the usual 5 coins. + +$Mechanics - Lottery Costs 10 Coins [Nora] +0407BD20 2C1E000A +*Makes the lottery cost nothing instead of the usual 5 coins. + $Mechanics - Mega Status No Longer Steals Coins [gamemasterplc] 0406be70 38600000 0406beac 38600000 diff --git a/Data/Sys/GameSettings/GP5E01.ini b/Data/Sys/GameSettings/GP5E01.ini index 424c394f2f60..39f95bb06861 100644 --- a/Data/Sys/GameSettings/GP5E01.ini +++ b/Data/Sys/GameSettings/GP5E01.ini @@ -466,6 +466,7 @@ $Press L+X: Player Stops Moving [Datel] [Gecko] # Add gecko cheats here. + $QOL - Automatically Advance Text Boxes [gamemasterplc] F6000001 80008180 A81F019C 7C001838 @@ -538,6 +539,12 @@ $QOL - Unlock Everything 04472838 3880FFFF 0446BFCC 38C0FFFF E2000001 80008000 +F6000002 80008180 +5460D808 54640FFE +7C040050 5400283E +14000020 3C80FFFF +14000024 6084FFFF +E0000000 80008000 *Unlocks Everything in the game. $Board: Bowser Nightmare - Bowser does not Steal Coins [gamemasterplc] @@ -561,10 +568,15 @@ $Board: Future Dream - Free Taxi Ride [gamemasterplc] E2000001 80008000 *Taxi does not charge you 10 coins in the board Future Dream. -$Board: Pirate Dream - Free Thwomps [gamemasterplc] +$Board: Pirate Dream - Free Thwomps & Whomps [gamemasterplc] 0412811C 4800002C 04128210 38800000 -*Thwomp is now free to pass in the board Pirate Dream. +F6000001 80008180 +4080002C 386003A0 +14000000 4800002C +140000C0 38800000 +E0000000 80008000 +*Thwomp and Whomp are now free to pass in the board Pirate Dream. $Board: Rainbow Dream - Free Bridge Crossings [gamemasterplc] 2046A0B0 2C030005 @@ -594,13 +606,6 @@ $Mechanics - Battle Minigames Don't Affect Mini-Game Star [gamemasterplc] e2000001 80008000 *Battle Minigames do not count towards the Mini-Game Star. -$Mechanics - Disable Bonus Stars [gamemasterplc] -20484E0C 40820020 -04484E0C 48000020 -04484E44 4800001C -E2000001 80008000 -*Disables the Bonus Stars - $Mechanics - Disable Capsules [gamemasterplc] F6000001 80008180 3BC50005 887E0000 @@ -675,7 +680,7 @@ E2100000 80008000 E2000001 80008000 *Last 5 Turns Event is always x3 Coins on Spaces. -$Mechanics- Last 5 Turns is 5 Star Spaces [gamemasterplc] +$Mechanics - Last 5 Turns is 5 Star Spaces [gamemasterplc] 20288860 00000076 045152BC 00000003 E2100000 80008000 @@ -710,11 +715,25 @@ E2100000 80008000 E2000001 80008000 *Last 5 Turns Event forces Red Spaces to turn into Bowser Spaces. 
-$Mechanics - Same Space Duels Don't Happen [gamemasterplc] -F6000001 80008180 -7C1D0000 40820048 -14000004 48000048 +$Mechanics - Lose 0 Coins On Red Spaces [Nora] +040AA160 38800000 + +$Mechanics - Lose 20 Coins On Red Spaces [Nora] +040AA160 3880FFEC + +$Mechanics – Obtain Capsules on Final Turn [gamemasterplc] +F6000001 80008180 +7C040040 40820024 +14000004 48000024 E0000000 80008000 +*Miracles can really happen + +$Mechanics - Same Space Duels Always Happen [Nora] +04094740 60000000 +*Duels never happen if you are on the same space. + +$Mechanics - Same Space Duels Don't Happen [Nora] +04094740 48000030 *Duels never happen if you are on the same space. $Mechanics - Use Capsules on Yourself For Free [gamemasterplc] @@ -743,6 +762,19 @@ $Minigame: Curvy Curves - 1 Player is Slower [Airsola] e2000001 00000000 *1 Player is slower in Curvy Curves. +$Minigame: Coney Island - No Slow Down +204763E4 3C608048 +044763E0 38000000 +0447663C 38000000 +E2000001 80008000 +*You do not slow down when gathering ice cream. + +$Minigame: Flower Shower - All Flowers are worth 3 Points [gamemasterplc] +20459BEC 801F003C +04459BEC 38000003 +E2000001 80008000 +*This makes the golden flowers the same value as the normal flowers. + $Minigame: Ground Pound Down - No Rocks Until End [gamemasterplc] 2046FCC4 4180FF6C 0446FCC4 60000000 @@ -755,15 +787,15 @@ $Minigame: Later Skater - Only 1 Lap [gamemasterplc] E2000001 80008000 *Only one lap around in the minigame Later Skater. -$Minigame: Mathletes - Always Multiplication [gamemasterplc] -2045D2AC 38030001 -C245D2AC 00000004 -3DC08047 81CE815C -7C0EF800 40820008 -38600000 38030001 -60000000 00000000 +$Minigame: Leaf Leap - Allow score to go above 180 [gamemasterplc] +20474848 408000FC +04474848 60000000 +E2000001 80008000 + +$Minigame: Leaf Leap - Leaves Display Quicker [gamemasterplc] +20474A04 38030001 +04474A04 3800003C E2000001 80008000 -*Multiplication is only present in the minigame Mathletes. $Minigame: Pop Star Piranhas - Halved Time to Pick [Airsola] 20288860 00000016 diff --git a/Data/Sys/GameSettings/GP6E01.ini b/Data/Sys/GameSettings/GP6E01.ini index 8b2b5dff0cb8..36948b933552 100644 --- a/Data/Sys/GameSettings/GP6E01.ini +++ b/Data/Sys/GameSettings/GP6E01.ini @@ -441,7 +441,6 @@ $Game - Orb Expansion [Airsola] 042bd36c 00000000 042bd37c 00000000 00000000 40000000 -# Almost all Orbs become obtainable on each board. Orb prices are rebalanced. [Gecko] # Add gecko cheats here. @@ -534,6 +533,14 @@ F6000001 80008180 E0000000 80008000 *Unlocks Everything in the game. +$Board - Faire Square: Free Whomps [gamemasterplc] +F6000001 80008180 +4080002C 386003A0 +14000000 4800002C +140000C0 38800000 +E0000000 80008000 +*Use Whomp paths without paying 10 coins + $Board - Snowflake Lake: Chomps Roll 7 during daytime, and 3 per dice during nighttime [gamemasterplc] 204cc818 4bcbbacd c24cc810 00000005 @@ -702,13 +709,6 @@ C214C75C 00000004 60000000 00000000 *50 becomes 99 due to integer cap. -$Mechanics - Get Items on Last Turn [gamemasterplc] -F6000001 80008180 -7C040040 4080041C -14000004 60000000 -E0000000 80008000 -*Items are obtainable on the final turn - $Mechanics - Faster Day and Night Cycles [Airsola] 20265b80 00000300 04265b80 00000100 @@ -716,6 +716,20 @@ $Mechanics - Faster Day and Night Cycles [Airsola] 04265b80 C0000100 *Day and Night Cycles take 1 turn instead of 3. 
+$Mechanics - Free Capsules from Shops [gamemasterplc] +F6000001 80008180 +2C17FFFE 41820198 +14000018 38000000 +14000800 38000000 +14000994 38000000 +E0000000 80008000 +F6000002 80008180 +809D0004 7C041800 +40810044 38600000 +14000000 38800000 +14000244 60000000 +E0000000 80008000 + $Mechanics - Free Stars [gamemasterplc] 0418333C 2C030000 0418342C 2C030000 @@ -748,6 +762,19 @@ $Mechanics - Last 5 Turns is Bowser Revolution [gamemasterplc] *Last 5 Turns Event is always a Bowser Revolution. *This makes everyones coins equal. +$Mechanics - Lose 0 Coins On Red Spaces [Ralf] +0415F278 38800000 + +$Mechanics - Lose 20 Coins On Red Spaces [Ralf] +0415F278 3880FFEC + +$Mechanics - Obtain Orbs on Last Turn [gamemasterplc] +F6000001 80008180 +7C040040 4080041C +14000004 60000000 +E0000000 80008000 +*Items are obtainable on the final turn + $Mechanics - Pink Boo Steals 15 Coins Minimum [Airsola] c21b3498 00000002 3884000d 9081002c @@ -772,6 +799,14 @@ $Mechanics - Sluggish Shroom Rolls 1-5 [Celerizer] 04186b80 4bfffff8 *Sluggish Shroom rolls 1-5 instead of a 1-10. +$Mechanics - Stars Cost 40 Coins [Ralf] +0418333c 2c030028 +0418342c 2c030028 +041834c4 2c030028 +c2183590 00000002 +38800029 7c8400d0 +60000000 00000000 + $Minigame: Body Builder - Faster Wheel [Airsola] 202c0254 0000002d 204d4d28 3fe66666 @@ -791,6 +826,12 @@ e2000002 80008000 e2000002 80008000 *Harder to time the wheel in the minigame Body Builder. +$Minigame: Catch You Letter - All Letters Give 3 Points +204CBD6C 80010028 +044CBD6C 38000003 +E2000001 80008000 +*This essentially nullifies love letters. + $Minigame: Crate & Peril - 3 Players can Jump [Airsola, gamemasterplc] 202c0254 00000025 0408074c 60fb0003 @@ -849,6 +890,21 @@ $Minigame: Memory Lane - No Trail [Airsola] e2000002 80008000 *Shy Guy will no longer leave a trail durning the demo in the minigame Memory Lane. +$Minigame: Mole It – All Moles are Worth 3 Points [gamemasterplc] +204D57F0 A8A10008 +044D57F0 38A00003 +E2000001 80008000 + +$Minigame: Snow Whirled – Just Mash A [gamemasterplc] +204CB568 38650001 +044CB568 38650004 +E2000001 80008000 + +$Minigame: Snow Whirled – Displayed Score Equals Rotations [gamemasterplc] +204CB66C 1C84005A +044CB66C 1C840001 +E2000001 80008000 + $Minigame: Odd Card Out - Spam Prevention [Airsola] 202c0254 00000007 204d0530 b0a30052 diff --git a/Data/Sys/GameSettings/GP7E01.ini b/Data/Sys/GameSettings/GP7E01.ini index bf05e3f55902..2686dc68f97b 100644 --- a/Data/Sys/GameSettings/GP7E01.ini +++ b/Data/Sys/GameSettings/GP7E01.ini @@ -568,17 +568,17 @@ $Mechanics - Orb Expansion [Airsola] 042ef744 00000000 00000000 40000000 -# Use with the Allow All Character Orbs cheat below. -# (Almost) all Orbs become obtainable on each board. Orb prices are rebalanced. -# CPUs cannot use character orbs that are not their own. -# Triple Shroom is removed, as this code is meant to be used in conjunction with the Orb balancing codes. - [Gecko] # Add gecko cheats here. + $QOL - Auto Advance Text Boxes [gamemasterplc] 04050ddc 60000000 *Automatically scrolls the text boxes without pushing A +$QOL - Controller Options Always Acesssible [Ralf] +0422D890 4800000C +*Controller options are always accessible in the pause menu + $QOL - Faster Boot Time [gamemasterplc] 204dd974 2c030000 044dd978 60000000 @@ -681,17 +681,18 @@ E2000001 80008000 *Unlocks everything in the game. *Permanent once saved! 
-$Board - Neon Heights: Balance Changes [gamemasterplc] +$Board - Neon Heights: Acting Minigame Only Awards if All Are Hit 202f2f3c 0000007d -c24ec2b0 00000003 -a87e02e8 2803000f -41a20008 38600000 -60000000 00000000 c24ec414 00000004 7c9e82ae 2804000f 41a2000c 38800000 48000008 3880000a 60000000 00000000 +e2000001 8000800 +*The shooting board event now awards 10 coins, but only if the player hits all the targets. + +$Board - Neon Heights: Bombs Now Give 20 Coins [Airsola] +202f2f3c 0000007d 044e0574 38000014 044e0bac 38a00014 044e08d8 38000014 @@ -701,11 +702,7 @@ c24ec414 00000004 044e2564 3b200014 044e269c 3b000014 e2000001 80008000 -202f2f3c 0000007d -28527bfa 00000256 -04527c78 4039999a -e2000002 80008000 -*All chests cost 20 coins, and the non-star chests both contain 20 coins. The shooting board event now awards 10 coins, but only if the player hits all the targets. It is now possible for 1 player to reach the star in the rocket happening event in the board Neon Heights. +*All chests cost 20 coins, and the non-star chests both contain 20 coins. $Board - Neon Heights: Bowser Path for Free [gamemasterplc] 204fdef4 2c03000a @@ -714,6 +711,19 @@ $Board - Neon Heights: Bowser Path for Free [gamemasterplc] e2000001 80008000 *Use Koopa Kid's path for free instead of paying 10 coins in the board Neon Heights. +$Board - Neon Heights: Rocket Minigame is Possible with 1 Player [Airsola] +202f2f3c 0000007d +28527bfa 00000256 +04527c78 4039999 +e2000002 80008000 +202f2f3c 0000007d +c24ec2b0 00000003 +a87e02e8 2803000f +41a20008 38600000 +60000000 00000000 +e2000001 80008000 +*It is now possible for 1 player to reach the star in the rocket happening event in the board Neon Heights. + $Board - Pagoda Peak: Bottle Rocket Launch Always Succeeds [gamemasterplc] 204df8a0 2803004b 044df8a4 60000000 @@ -896,6 +906,10 @@ c24e6288 00000006 e2000001 80008000 *Battle Minigames do not count towards the Mini-Game Star. +$Mechanics - Character Spaces Give 10 coins [Ralf] +041685BC 60000000 +041685C0 38800005 + $Mechanics - Disable Bonus Stars [gamemasterplc] 204DD870 88650004 044DD870 38600000 @@ -973,6 +987,21 @@ e2000001 80008000 e2000001 80008000 *Stars cost nothing instead of the usual 20 coins. 
+$Mechanics - Gain 5 Coins On Flower Orb Spaces [Ralf] +041C4F30 38A00005 + +$Mechanics - Koopa Kid Orb Is Found in Shops [Ralf] +0419EB0C 48000024 +04274ADC 00000001 +06004860 0000002C +88030000 28000028 +4082001C 3C80802E +6084F598 38A00010 +4BFFECC9 38000028 +98030000 3B9C0001 +481E5098 00000000 +041E991C 4BE1AF44 + $Mechanics - Hammer Bro Orb Steals 20 Coins [Ralf] 041A902C 3AC00014 041A9A28 38A0FFEC @@ -1004,25 +1033,33 @@ c21d8ed0 00000007 041d8f68 3aa00028 *(X) -> 40 Coins, 10 Coins -> 40 Coins, Half Coins -> 40 Coins, 2 Stars -> 1 Star -$Mechanics - Last 5 Turns Event Is Always x3 Coins on Spaces +$Mechanics - Last 5 Turns Event Is Always x3 Coins on Spaces [gamemasterplc] 042311A8 38000000 -$Mechanics - Last 5 Turns Event Is Always 10 More Koopa Kid Spaces +$Mechanics - Last 5 Turns Event Is Always 10 More Koopa Kid Spaces [gamemasterplc] 042311A8 38000001 -$Mechanics - Last 5 Turns Event Is Always 40 Coin Bonus +$Mechanics - Last 5 Turns Event Is Always 40 Coin Bonus [gamemasterplc] 042311A8 38000002 -$Mechanics - Last 5 Turns Event Is Always Stars are 10 Coins +$Mechanics - Last 5 Turns Event Is Always Stars are 10 Coins [gamemasterplc] 042311A8 38000003 -$Mechanics - Last 5 Turns Event Is Always Red Spaces are Bowser Spaces +$Mechanics - Last 5 Turns Event Is Always Red Spaces are Bowser Spaces [gamemasterplc] 042311A8 38000004 -$Mechanics - Last 5 Turns Event Is Always Chain Chomp Ride for 5 Coins (Pyramid Park) +$Mechanics - Last 5 Turns Event Is Always Chain Chomp Ride for 5 Coins (Pyramid Park) [gamemasterplc] 042311A8 38000005 *Only use on Pyramid Park +$Mechanics - Lose 0 Coins On Red Spaces [Ralf] +04168600 60000000 +04168604 38800000 + +$Mechanics - Lose 20 Coins On Red Spaces [Ralf] +04168600 60000000 +04168604 3880FFEC + $Mechanics - Mic Minigames Have A 10% Chance Of Appearing Instead Of 20% [gamemasterplc] 042f7a04 41200000 *Less Mic Minigames and see more standard minigames. @@ -1035,14 +1072,23 @@ $Mechanics - Microphone Always Off [gamemasterplc] $Mechanics - Orbs Can Only Be Placed On Your Current Space [gamemasterplc] 041e7128 60000000 -*Orb is more aggressively balanced. +*Orbs are more aggressively balanced. + +$Mechanics - Piranha Plant Takes ALL Coins [gamemasterplc] +041AC0DC 38030000 +041AC0E8 7C771B78 $Mechanics - Slow Shroom Orb Rolls 1-5 [gamemasterplc] 0418ccc8 38000005 0418d1ac 40820020 *Orb is more balanced. -$Mechanics - Use Multiple Orbs in the Same Turn [Nora] +$Mechanics - Stars Cost 40 coins [Ralf] +0418876c 3b800014 +04188774 3b800028 +*Only works in Grand Canal and Bowser's Enchanted Inferno + +$Mechanics - Use Multiple Orbs in the Same Turn [Ralf] 041E3764 3860FFFF *Does not work with CPUs @@ -1084,6 +1130,11 @@ e2000002 80008000 e2000002 80008000 *Does not work with CPUs +$Minigame: Kart Wheeled - Only 1 Lap [gamemasterplc] +204DE978 28050005 +044DE978 28050001 +E2000001 80008000 + $Minigame: La Bomba - Always 4 Bombs [Airsola, Rain] 202f2f3c 0000001a 044e8cec 00000004 @@ -1098,12 +1149,22 @@ $Minigame: Pogo-a-Go-Go - Spin Faster/Less Airtime [Airsola] e2000001 80008000 *The 1P can spin the platform faster. 
The 3P have less airtime on their hops -$Mingiame: Spinner Cell - Faster Machines [Airsola, Rain] +$Minigame: Picture This - Only 1 Round [gamemasterplc] +204FF970 7C00F214 +044FF970 38000003 +E2000001 80008000 + +$Minigame: Spinner Cell - Faster Machines [Airsola, Rain] 202f2f3c 0000001d 044ef5d0 40c00000 044ef5d4 41100000 e2000001 80008000 +$Minigame: Target Tag - All Targets Worth 50 Points [gamemasterplc] +204EAC28 80840000 +044EAC28 38800032 +E2000001 80008000 + $Minigame: Spray Anything - Faster Bubbles/Less Cooldown [Airsola, gamemasterplc] 202f2f3c 0000001b 0450bc40 c10a872b diff --git a/Data/Sys/GameSettings/RM8E01.ini b/Data/Sys/GameSettings/RM8E01.ini index ce96e76253ac..dae60fb48c40 100644 --- a/Data/Sys/GameSettings/RM8E01.ini +++ b/Data/Sys/GameSettings/RM8E01.ini @@ -205,6 +205,10 @@ $QOL - Increased Text Display [Nora] 0405DED4 60000000 *Text is displayed instantly. +$QOL - Invert IR Stick for GameCube Mod [gamemasterplc] +04106A24 83DE6404 +04106A28 60000000 + $QOL - Remove Explanations [Rain] 82200001 80228760 86300001 ffffbfff From 5e4d1f732db7f2ef397c55ba8fadbbf2975c6bbe Mon Sep 17 00:00:00 2001 From: Zopolis4 Date: Thu, 3 Feb 2022 10:40:28 +1100 Subject: [PATCH 051/237] Remove the Maps folder from the Sys directory It only had two out of date and game-specific maps from decades ago --- Data/Sys/Maps/GFZE01.map | 41 ---------------------------------- Data/Sys/Maps/GMBE8P.map | 34 ---------------------------- Readme.md | 1 - Source/Core/Core/Boot/Boot.cpp | 19 +++++----------- 4 files changed, 6 insertions(+), 89 deletions(-) delete mode 100644 Data/Sys/Maps/GFZE01.map delete mode 100644 Data/Sys/Maps/GMBE8P.map diff --git a/Data/Sys/Maps/GFZE01.map b/Data/Sys/Maps/GFZE01.map deleted file mode 100644 index 464dba3610af..000000000000 --- a/Data/Sys/Maps/GFZE01.map +++ /dev/null @@ -1,41 +0,0 @@ -.text -80003488 000000b8 80003488 0 __fill_mem -80003540 00000050 80003540 0 memcpy -8006cff8 0000004c 8006cff8 0 .LoadQuantizers -8006d044 0000002c 8006d044 0 .kill_infinites_helper -8006d070 00000018 8006d070 0 .kill_infinites -8006d088 0000002c 8006d088 0 .rsqrt -8006d0b4 00000034 8006d0b4 0 .sqrt_internal_fz -8006d0e8 00000030 8006d0e8 0 .rsqrt_internal_fz -8006d118 00000070 8006d118 0 .sqrt_fz -8006d188 00000030 8006d188 0 .wrapping_once_fp_lookup -8006d1b8 00000064 8006d1b8 0 .weird2 -8006d1c4 00000058 8006d1c4 0 .into_weird2 -8006d21c 00000030 8006d21c 0 .lookup_some_float_in_table_with_neg_wrap -8006d24c 00000184 8006d24c 0 .atan2 -8006d3d0 0000009c 8006d3d0 0 .asin_fz -8006d46c 000000c8 8006d46c 0 .acos_fz -8006d534 00000070 8006d534 0 .evil_vec_cosine -8006d5f0 00000078 8006d5f0 0 .evil_vec_setlength -8006d668 00000094 8006d668 0 .evil_vec_something -8006d6fc 0000005c 8006d6fc 0 .func -8006d784 0000002c 8006d784 0 .load_strange_matrix1 -8006d7b0 0000002c 8006d7b0 0 .load_strange_matrix2 -8006d7f4 0000003c 8006d7f4 0 .some_strange_destination -8006db30 00000044 8006db30 0 .push_matrix_3x3? 
-8006db74 00000038 8006db74 0 .write_top_3x3_matrix -8006dbe4 0000003c 8006dbe4 0 .read_current_3x3_matrix -8006dc20 00000014 8006dc20 0 .pop_matrix_stack -8006e424 00000074 8006e424 0 .weird_param_in_p1_p2 -8006e978 000001d4 8006e978 0 zz_006e978_ -8006eb4c 000001c0 8006eb4c 0 zz_006eb4c_ -8006f6a8 000000cc 8006f6a8 0 .z_last_skum_function -800798f0 000000ec 800798f0 0 __div2u -800799dc 00000138 800799dc 0 __div2i -80079b14 000000e0 80079b14 0 __mod2u -80079bf8 0000010c 80079bf8 0 __mod2i -80079d04 00000024 80079d04 0 __shl2i -80079d28 00000024 80079d28 0 __shr2u -80079d4c 00000028 80079d4c 0 __shr2i -8008596c 00000310 8008596c 0 big_matrix_trickery -80088538 00000020 80088538 0 zz_0088538_ diff --git a/Data/Sys/Maps/GMBE8P.map b/Data/Sys/Maps/GMBE8P.map deleted file mode 100644 index 7cbab7e984b6..000000000000 --- a/Data/Sys/Maps/GMBE8P.map +++ /dev/null @@ -1,34 +0,0 @@ -.text -800031f0 0000001c 800031f0 0 load_sp_rtoc -80007034 0000004c 80007034 0 .LoadQuantizers -80007080 00000030 80007080 0 .LoadInfinitiesEtc -800070b0 00000038 800070b0 0 .rsqrt -800070ec 00000040 800070ec 0 .sqrt_internal_needs_cr1 -8000712c 00000040 8000712c 0 .rsqrt_internal_needs_cr1 -800071e0 00000030 800071e0 0 .wrapping_once_fp_lookup -80007210 00000064 80007210 0 .weird2 -80007274 00000030 80007274 0 .lookup_some_float_in_table_with_neg_wrap -800072a4 00000180 800072a4 0 .atan2 -80007424 000000b8 80007424 0 .calls_sqrt -800074dc 0000005c 800074dc 0 .func -80007538 0000002c 80007538 0 .load_strange_matrix1 -80007564 0000002c 80007564 0 .load_strange_matrix3 -80007590 0000002c 80007590 0 .load_strange_matrix2 -80007834 00000044 80007834 0 .push_matrix_3x3? -80007878 00000038 80007878 0 .read_top_3x3matrix -800078b0 00000038 800078b0 0 .write_top_3x3_matrix -800078e8 0000003c 800078e8 0 .read_current_3x3_matrix -80007924 00000014 80007924 0 .pop_matrix_stack -80007a50 00000170 80007a50 0 .mult_matrix? 
-80007ecc 000000bc 80007ecc 0 .weird_vector_op_status_in_cr2 -80007f88 00000074 80007f88 0 .weird_param_in_p1_p2 -800080fc 00000078 800080fc 0 .evil_normalize -80008174 00000078 80008174 0 .evil_vec_setlength -800081ec 00000070 800081ec 0 .evil_vec_cosine -80008538 000000f0 80008538 0 .calls_evil1 -8000875c 00000088 8000875c 0 .another_caller -800087e4 0000008c 800087e4 0 .another_caller2 -80008d30 000001b4 80008d30 0 .another_caller3 -80036544 000001b4 80036544 0 .fctiwi_weird2 -8003dd1c 00000110 8003dd1c 0 .fctwi_weird -80043b48 000005bc 80043b48 0 .fctwi_weird3 diff --git a/Readme.md b/Readme.md index 5ec9acee9da9..80f0f1424f9d 100644 --- a/Readme.md +++ b/Readme.md @@ -200,7 +200,6 @@ These folders are installed read-only and should not be changed: * `GameSettings`: per-game default settings database * `GC`: DSP and font dumps -* `Maps`: symbol tables (dev only) * `Shaders`: post-processing shaders * `Themes`: icon themes for GUI * `Resources`: icons that are theme-agnostic diff --git a/Source/Core/Core/Boot/Boot.cpp b/Source/Core/Core/Boot/Boot.cpp index 30691c730fa0..0ad7f6ec1265 100644 --- a/Source/Core/Core/Boot/Boot.cpp +++ b/Source/Core/Core/Boot/Boot.cpp @@ -362,24 +362,17 @@ void CBoot::UpdateDebugger_MapLoaded() bool CBoot::FindMapFile(std::string* existing_map_file, std::string* writable_map_file) { const std::string& game_id = SConfig::GetInstance().m_debugger_game_id; + std::string path = File::GetUserPath(D_MAPS_IDX) + game_id + ".map"; if (writable_map_file) - *writable_map_file = File::GetUserPath(D_MAPS_IDX) + game_id + ".map"; + *writable_map_file = path; - static const std::array maps_directories{ - File::GetUserPath(D_MAPS_IDX), - File::GetSysDirectory() + MAPS_DIR DIR_SEP, - }; - for (const auto& directory : maps_directories) + if (File::Exists(path)) { - std::string path = directory + game_id + ".map"; - if (File::Exists(path)) - { - if (existing_map_file) - *existing_map_file = std::move(path); + if (existing_map_file) + *existing_map_file = std::move(path); - return true; - } + return true; } return false; From 68d987bbee49f96f25b0e74c3e24946c0f0183a6 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Tue, 15 Feb 2022 22:34:29 -0800 Subject: [PATCH 052/237] CommandProcessor: Add FIFO_BP_LO/HI to directly_mapped_vars directly_mapped_vars was added in #69 (4129b30494757a79daf8a07e6a07ea937ba1c94b), but for some reason FIFO_BP_LO/HI were split out from it in in #885 (65af90669bd5f9e02bbaa994d51d5c83d147b868). As far as I can tell, this code (and the code that existed at the time) is identical, so there's no reason to have it handled separately. 
--- Source/Core/VideoCommon/CommandProcessor.cpp | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/Source/Core/VideoCommon/CommandProcessor.cpp b/Source/Core/VideoCommon/CommandProcessor.cpp index 890d7ebfa4bc..6ebb726d23d0 100644 --- a/Source/Core/VideoCommon/CommandProcessor.cpp +++ b/Source/Core/VideoCommon/CommandProcessor.cpp @@ -121,11 +121,6 @@ void DoState(PointerWrap& p) p.Do(s_interrupt_waiting); } -static inline void WriteLow(std::atomic& reg, u16 lowbits) -{ - reg.store((reg.load(std::memory_order_relaxed) & 0xFFFF0000) | lowbits, - std::memory_order_relaxed); -} static inline void WriteHigh(std::atomic& reg, u16 highbits) { reg.store((reg.load(std::memory_order_relaxed) & 0x0000FFFF) | (static_cast(highbits) << 16), @@ -205,6 +200,8 @@ void RegisterMMIO(MMIO::Mapping* mmio, u32 base) {FIFO_WRITE_POINTER_HI, MMIO::Utils::HighPart(&fifo.CPWritePointer), false, WMASK_HI_RESTRICT}, // FIFO_READ_POINTER has different code for single/dual core. + {FIFO_BP_LO, MMIO::Utils::LowPart(&fifo.CPBreakpoint), false, WMASK_LO_ALIGN_32BIT}, + {FIFO_BP_HI, MMIO::Utils::HighPart(&fifo.CPBreakpoint), false, WMASK_HI_RESTRICT}, }; for (auto& mapped_var : directly_mapped_vars) @@ -214,16 +211,6 @@ void RegisterMMIO(MMIO::Mapping* mmio, u32 base) MMIO::DirectWrite(mapped_var.ptr, mapped_var.wmask)); } - mmio->Register(base | FIFO_BP_LO, MMIO::DirectRead(MMIO::Utils::LowPart(&fifo.CPBreakpoint)), - MMIO::ComplexWrite([](u32, u16 val) { - WriteLow(fifo.CPBreakpoint, val & WMASK_LO_ALIGN_32BIT); - })); - mmio->Register(base | FIFO_BP_HI, - MMIO::DirectRead(MMIO::Utils::HighPart(&fifo.CPBreakpoint)), - MMIO::ComplexWrite([WMASK_HI_RESTRICT](u32, u16 val) { - WriteHigh(fifo.CPBreakpoint, val & WMASK_HI_RESTRICT); - })); - // Timing and metrics MMIOs are stubbed with fixed values. struct { From a81b44f69780a15ef6c44377cd88e6e4415ccb8e Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Wed, 16 Feb 2022 11:07:12 -0800 Subject: [PATCH 053/237] Fix menu bar becoming desynced when Pause at End of Movie is disabled Previously, when Pause at End of Movie was disabled, the game would continue running as it should, but the menu bar would think the game was paused, showing the play button instead of the pause button. To make things worse, clicking the play button would then restart the game, instead of pausing or doing nothing. F10 paused/unpaused as normal, though. The old behavior was essentially to enable stepping/pause mode (via `CPU::Break()`) and then if Pause at End of Movie was disabled, to un-pause on the host thread (via `CPU::EnableStepping(false)`). For reasons which aren't entirely clear to me, the first one notified the menu bar (through the `Host::UpdateDisasmDialog` callback, not the `Settings::EmulationStateChanged` one), and the second did not. In any case, this approach does not particularly make sense; I don't see any reason to pause and unpause if Pause at End of Movie is disabled; instead, we should only pause when Pause at End of Movie is enabled. This behavior was probably introduced in c1944f623b5918ab45152795dea9e49f57878138, though I haven't tested it. 
--- Source/Core/Core/Movie.cpp | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/Source/Core/Core/Movie.cpp b/Source/Core/Core/Movie.cpp index 25b8f540188b..7eebb5c49ca3 100644 --- a/Source/Core/Core/Movie.cpp +++ b/Source/Core/Core/Movie.cpp @@ -1324,7 +1324,7 @@ void EndPlayInput(bool cont) { // We can be called by EmuThread during boot (CPU::State::PowerDown) bool was_running = Core::IsRunningAndStarted() && !CPU::IsStepping(); - if (was_running) + if (was_running && Config::Get(Config::MAIN_MOVIE_PAUSE_MOVIE)) CPU::Break(); s_rerecords = 0; s_currentByte = 0; @@ -1337,11 +1337,7 @@ void EndPlayInput(bool cont) // delete tmpInput; // tmpInput = nullptr; - Core::QueueHostJob([=] { - Core::UpdateWantDeterminism(); - if (was_running && !Config::Get(Config::MAIN_MOVIE_PAUSE_MOVIE)) - CPU::EnableStepping(false); - }); + Core::QueueHostJob([=] { Core::UpdateWantDeterminism(); }); } } From be81fe86e19e3afc70fc9190fd8f5f18f906c0eb Mon Sep 17 00:00:00 2001 From: Merry Date: Wed, 16 Feb 2022 18:45:02 +0000 Subject: [PATCH 054/237] JitRegister: Use fmt Prefer fmt to printf-style formatting. --- Source/Core/Common/JitRegister.cpp | 4 +-- Source/Core/Common/JitRegister.h | 25 ++++++++++--------- .../PowerPC/Jit64Common/Jit64AsmCommon.cpp | 4 +-- .../PowerPC/Jit64Common/TrampolineCache.cpp | 4 +-- .../Core/Core/PowerPC/JitCommon/JitCache.cpp | 4 +-- Source/Core/VideoCommon/VertexLoaderX64.cpp | 5 ++-- 6 files changed, 22 insertions(+), 24 deletions(-) diff --git a/Source/Core/Common/JitRegister.cpp b/Source/Core/Common/JitRegister.cpp index eb5845d5cbc5..0dd5557f1194 100644 --- a/Source/Core/Common/JitRegister.cpp +++ b/Source/Core/Common/JitRegister.cpp @@ -81,15 +81,13 @@ bool IsEnabled() return s_is_enabled; } -void RegisterV(const void* base_address, u32 code_size, const char* format, va_list args) +void Register(const void* base_address, u32 code_size, const std::string& symbol_name) { #if !(defined USE_OPROFILE && USE_OPROFILE) && !defined(USE_VTUNE) if (!s_perf_map_file.IsOpen()) return; #endif - std::string symbol_name = StringFromFormatV(format, args); - #if defined USE_OPROFILE && USE_OPROFILE op_write_native_code(s_agent, symbol_name.c_str(), (u64)base_address, base_address, code_size); #endif diff --git a/Source/Core/Common/JitRegister.h b/Source/Core/Common/JitRegister.h index 084f4486fa36..49dfb5ab447d 100644 --- a/Source/Core/Common/JitRegister.h +++ b/Source/Core/Common/JitRegister.h @@ -2,31 +2,32 @@ // SPDX-License-Identifier: GPL-2.0-or-later #pragma once -#include + #include + +#include + #include "Common/CommonTypes.h" namespace JitRegister { void Init(const std::string& perf_dir); void Shutdown(); -void RegisterV(const void* base_address, u32 code_size, const char* format, va_list args); +void Register(const void* base_address, u32 code_size, const std::string& symbol_name); bool IsEnabled(); -inline void Register(const void* base_address, u32 code_size, const char* format, ...) +template +inline void Register(const void* base_address, u32 code_size, fmt::format_string format, + Args&&... args) { - va_list args; - va_start(args, format); - RegisterV(base_address, code_size, format, args); - va_end(args); + Register(base_address, code_size, fmt::format(format, std::forward(args)...)); } -inline void Register(const void* start, const void* end, const char* format, ...) +template +inline void Register(const void* start, const void* end, fmt::format_string format, + Args&&... 
args) { - va_list args; - va_start(args, format); u32 code_size = (u32)((const char*)end - (const char*)start); - RegisterV(start, code_size, format, args); - va_end(args); + Register(start, code_size, fmt::format(format, std::forward(args)...)); } } // namespace JitRegister diff --git a/Source/Core/Core/PowerPC/Jit64Common/Jit64AsmCommon.cpp b/Source/Core/Core/PowerPC/Jit64Common/Jit64AsmCommon.cpp index b6fc24e89bfd..124218fb7f5f 100644 --- a/Source/Core/Core/PowerPC/Jit64Common/Jit64AsmCommon.cpp +++ b/Source/Core/Core/PowerPC/Jit64Common/Jit64AsmCommon.cpp @@ -369,7 +369,7 @@ const u8* CommonAsmRoutines::GenQuantizedStoreRuntime(bool single, EQuantizeType const u8* load = AlignCode4(); GenQuantizedStore(single, type, -1); RET(); - JitRegister::Register(start, GetCodePtr(), "JIT_QuantizedStore_%i_%i", type, single); + JitRegister::Register(start, GetCodePtr(), "JIT_QuantizedStore_{}_{}", type, single); return load; } @@ -400,7 +400,7 @@ const u8* CommonAsmRoutines::GenQuantizedLoadRuntime(bool single, EQuantizeType const u8* load = AlignCode4(); GenQuantizedLoad(single, type, -1); RET(); - JitRegister::Register(start, GetCodePtr(), "JIT_QuantizedLoad_%i_%i", type, single); + JitRegister::Register(start, GetCodePtr(), "JIT_QuantizedLoad_{}_{}", type, single); return load; } diff --git a/Source/Core/Core/PowerPC/Jit64Common/TrampolineCache.cpp b/Source/Core/Core/PowerPC/Jit64Common/TrampolineCache.cpp index d30b1eacb95a..faf0b7ec9e86 100644 --- a/Source/Core/Core/PowerPC/Jit64Common/TrampolineCache.cpp +++ b/Source/Core/Core/PowerPC/Jit64Common/TrampolineCache.cpp @@ -48,7 +48,7 @@ const u8* TrampolineCache::GenerateReadTrampoline(const TrampolineInfo& info) JMP(info.start + info.len, true); - JitRegister::Register(trampoline, GetCodePtr(), "JIT_ReadTrampoline_%x", info.pc); + JitRegister::Register(trampoline, GetCodePtr(), "JIT_ReadTrampoline_{:x}", info.pc); return trampoline; } @@ -67,6 +67,6 @@ const u8* TrampolineCache::GenerateWriteTrampoline(const TrampolineInfo& info) JMP(info.start + info.len, true); - JitRegister::Register(trampoline, GetCodePtr(), "JIT_WriteTrampoline_%x", info.pc); + JitRegister::Register(trampoline, GetCodePtr(), "JIT_WriteTrampoline_{:x}", info.pc); return trampoline; } diff --git a/Source/Core/Core/PowerPC/JitCommon/JitCache.cpp b/Source/Core/Core/PowerPC/JitCommon/JitCache.cpp index de55900846b4..ca090547f932 100644 --- a/Source/Core/Core/PowerPC/JitCommon/JitCache.cpp +++ b/Source/Core/Core/PowerPC/JitCommon/JitCache.cpp @@ -131,12 +131,12 @@ void JitBaseBlockCache::FinalizeBlock(JitBlock& block, bool block_link, if (JitRegister::IsEnabled() && (symbol = g_symbolDB.GetSymbolFromAddr(block.effectiveAddress)) != nullptr) { - JitRegister::Register(block.checkedEntry, block.codeSize, "JIT_PPC_%s_%08x", + JitRegister::Register(block.checkedEntry, block.codeSize, "JIT_PPC_{}_{:08x}", symbol->function_name.c_str(), block.physicalAddress); } else { - JitRegister::Register(block.checkedEntry, block.codeSize, "JIT_PPC_%08x", + JitRegister::Register(block.checkedEntry, block.codeSize, "JIT_PPC_{:08x}", block.physicalAddress); } } diff --git a/Source/Core/VideoCommon/VertexLoaderX64.cpp b/Source/Core/VideoCommon/VertexLoaderX64.cpp index 40ae50821914..7a4929361dfb 100644 --- a/Source/Core/VideoCommon/VertexLoaderX64.cpp +++ b/Source/Core/VideoCommon/VertexLoaderX64.cpp @@ -50,9 +50,8 @@ VertexLoaderX64::VertexLoaderX64(const TVtxDesc& vtx_desc, const VAT& vtx_att) GenerateVertexLoader(); WriteProtect(); - const std::string name = - fmt::format("VertexLoaderX64\nVtx 
desc: \n{}\nVAT:\n{}", vtx_desc, vtx_att); - JitRegister::Register(region, GetCodePtr(), name.c_str()); + JitRegister::Register(region, GetCodePtr(), "VertexLoaderX64\nVtx desc: \n{}\nVAT:\n{}", vtx_desc, + vtx_att); } OpArg VertexLoaderX64::GetVertexAddr(CPArray array, VertexComponentFormat attribute) From f56251168e9176b5c74936ad2a41c59495f13a0c Mon Sep 17 00:00:00 2001 From: Dentomologist Date: Wed, 16 Feb 2022 12:44:50 -0800 Subject: [PATCH 055/237] Movie: Convert PlayMode to enum class and move to cpp file --- Source/Core/Core/Movie.cpp | 37 ++++++++++++++++++++++--------------- Source/Core/Core/Movie.h | 7 ------- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/Source/Core/Core/Movie.cpp b/Source/Core/Core/Movie.cpp index 25b8f540188b..5eb9b32a108f 100644 --- a/Source/Core/Core/Movie.cpp +++ b/Source/Core/Core/Movie.cpp @@ -79,9 +79,16 @@ namespace Movie using namespace WiimoteCommon; using namespace WiimoteEmu; +enum class PlayMode +{ + None = 0, + Recording, + Playing, +}; + static bool s_bReadOnly = true; static u32 s_rerecords = 0; -static PlayMode s_playMode = MODE_NONE; +static PlayMode s_playMode = PlayMode::None; static std::array s_controllers{}; static std::array s_wiimotes{}; @@ -308,7 +315,7 @@ void SetReadOnly(bool bEnabled) bool IsRecordingInput() { - return (s_playMode == MODE_RECORDING); + return (s_playMode == PlayMode::Recording); } bool IsRecordingInputFromSaveState() @@ -328,12 +335,12 @@ bool IsJustStartingPlayingInputFromSaveState() bool IsPlayingInput() { - return (s_playMode == MODE_PLAYING); + return (s_playMode == PlayMode::Playing); } bool IsMovieActive() { - return s_playMode != MODE_NONE; + return s_playMode != PlayMode::None; } bool IsReadOnly() @@ -528,7 +535,7 @@ void ChangeWiiPads(bool instantly) bool BeginRecordingInput(const ControllerTypeArray& controllers, const WiimoteEnabledArray& wiimotes) { - if (s_playMode != MODE_NONE || + if (s_playMode != PlayMode::None || (controllers == ControllerTypeArray{} && wiimotes == WiimoteEnabledArray{})) return false; @@ -585,7 +592,7 @@ bool BeginRecordingInput(const ControllerTypeArray& controllers, Wiimote::ResetAllWiimotes(); } - s_playMode = MODE_RECORDING; + s_playMode = PlayMode::Recording; s_author = Config::Get(Config::MAIN_MOVIE_MOVIE_AUTHOR); s_temp_input.clear(); @@ -937,7 +944,7 @@ void ReadHeader() // NOTE: Host Thread bool PlayInput(const std::string& movie_path, std::optional* savestate_path) { - if (s_playMode != MODE_NONE) + if (s_playMode != PlayMode::None) return false; File::IOFile recording_file(movie_path, "rb"); @@ -959,7 +966,7 @@ bool PlayInput(const std::string& movie_path, std::optional* savest s_currentLagCount = 0; s_currentInputCount = 0; - s_playMode = MODE_PLAYING; + s_playMode = PlayMode::Playing; // Wiimotes cause desync issues if they're not reset before launching the game Wiimote::ResetAllWiimotes(); @@ -1139,18 +1146,18 @@ void LoadInput(const std::string& movie_path) { if (s_bReadOnly) { - if (s_playMode != MODE_PLAYING) + if (s_playMode != PlayMode::Playing) { - s_playMode = MODE_PLAYING; + s_playMode = PlayMode::Playing; Core::UpdateWantDeterminism(); Core::DisplayMessage("Switched to playback", 2000); } } else { - if (s_playMode != MODE_RECORDING) + if (s_playMode != PlayMode::Recording) { - s_playMode = MODE_RECORDING; + s_playMode = PlayMode::Recording; Core::UpdateWantDeterminism(); Core::DisplayMessage("Switched to recording", 2000); } @@ -1317,10 +1324,10 @@ void EndPlayInput(bool cont) // If !IsMovieActive(), changing s_playMode requires calling 
UpdateWantDeterminism ASSERT(IsMovieActive()); - s_playMode = MODE_RECORDING; + s_playMode = PlayMode::Recording; Core::DisplayMessage("Reached movie end. Resuming recording.", 2000); } - else if (s_playMode != MODE_NONE) + else if (s_playMode != PlayMode::None) { // We can be called by EmuThread during boot (CPU::State::PowerDown) bool was_running = Core::IsRunningAndStarted() && !CPU::IsStepping(); @@ -1328,7 +1335,7 @@ void EndPlayInput(bool cont) CPU::Break(); s_rerecords = 0; s_currentByte = 0; - s_playMode = MODE_NONE; + s_playMode = PlayMode::None; Core::DisplayMessage("Movie End.", 2000); s_bRecordingFromSaveState = false; // we don't clear these things because otherwise we can't resume playback if we load a movie diff --git a/Source/Core/Core/Movie.h b/Source/Core/Core/Movie.h index a774f7ad4eea..25747cd8a26d 100644 --- a/Source/Core/Core/Movie.h +++ b/Source/Core/Core/Movie.h @@ -37,13 +37,6 @@ class EncryptionKey; namespace Movie { // Enumerations and structs -enum PlayMode -{ - MODE_NONE = 0, - MODE_RECORDING, - MODE_PLAYING -}; - enum class ControllerType { None = 0, From edbe202aa3290ca267acebc7e9602a08e447754f Mon Sep 17 00:00:00 2001 From: Dentomologist Date: Thu, 17 Feb 2022 09:54:07 -0800 Subject: [PATCH 056/237] VideoCommon: Convert OptionType to enum class --- .../Graphics/PostProcessingConfigWindow.cpp | 8 ++-- Source/Core/VideoCommon/PostProcessing.cpp | 37 +++++++++---------- Source/Core/VideoCommon/PostProcessing.h | 10 ++--- 3 files changed, 27 insertions(+), 28 deletions(-) diff --git a/Source/Core/DolphinQt/Config/Graphics/PostProcessingConfigWindow.cpp b/Source/Core/DolphinQt/Config/Graphics/PostProcessingConfigWindow.cpp index 434aaa6d2f82..59907bd39b20 100644 --- a/Source/Core/DolphinQt/Config/Graphics/PostProcessingConfigWindow.cpp +++ b/Source/Core/DolphinQt/Config/Graphics/PostProcessingConfigWindow.cpp @@ -227,11 +227,11 @@ u32 PostProcessingConfigWindow::ConfigGroup::AddWidgets(PostProcessingConfigWind switch (m_config_option->m_type) { - case OptionType::OPTION_BOOL: + case OptionType::Bool: return AddBool(parent, grid, row); - case OptionType::OPTION_FLOAT: + case OptionType::Float: return AddFloat(parent, grid, row); - case OptionType::OPTION_INTEGER: + case OptionType::Integer: return AddInteger(parent, grid, row); default: // obviously shouldn't get here @@ -336,7 +336,7 @@ void PostProcessingConfigWindow::ConfigGroup::EnableSuboptions(const bool state) { for (auto& it : m_subgroups) { - if (it->m_config_option->m_type == OptionType::OPTION_BOOL) + if (it->m_config_option->m_type == OptionType::Bool) { it->m_checkbox->setEnabled(state); } diff --git a/Source/Core/VideoCommon/PostProcessing.cpp b/Source/Core/VideoCommon/PostProcessing.cpp index 69c3dcfa0c64..d9f7a8e8c690 100644 --- a/Source/Core/VideoCommon/PostProcessing.cpp +++ b/Source/Core/VideoCommon/PostProcessing.cpp @@ -166,11 +166,11 @@ void PostProcessingConfiguration::LoadOptions(const std::string& code) option.m_dirty = true; if (it.m_type == "OptionBool") - option.m_type = ConfigurationOption::OptionType::OPTION_BOOL; + option.m_type = ConfigurationOption::OptionType::Bool; else if (it.m_type == "OptionRangeFloat") - option.m_type = ConfigurationOption::OptionType::OPTION_FLOAT; + option.m_type = ConfigurationOption::OptionType::Float; else if (it.m_type == "OptionRangeInteger") - option.m_type = ConfigurationOption::OptionType::OPTION_INTEGER; + option.m_type = ConfigurationOption::OptionType::Integer; for (const auto& string_option : it.m_options) { @@ -213,17 +213,17 @@ void 
PostProcessingConfiguration::LoadOptions(const std::string& code) output_float = &option.m_float_step_values; } - if (option.m_type == ConfigurationOption::OptionType::OPTION_BOOL) + if (option.m_type == ConfigurationOption::OptionType::Bool) { TryParse(string_option.second, &option.m_bool_value); } - else if (option.m_type == ConfigurationOption::OptionType::OPTION_INTEGER) + else if (option.m_type == ConfigurationOption::OptionType::Integer) { TryParseVector(string_option.second, output_integer); if (output_integer->size() > 4) output_integer->erase(output_integer->begin() + 4, output_integer->end()); } - else if (option.m_type == ConfigurationOption::OptionType::OPTION_FLOAT) + else if (option.m_type == ConfigurationOption::OptionType::Float) { TryParseVector(string_option.second, output_float); if (output_float->size() > 4) @@ -245,11 +245,11 @@ void PostProcessingConfiguration::LoadOptionsConfiguration() { switch (it.second.m_type) { - case ConfigurationOption::OptionType::OPTION_BOOL: + case ConfigurationOption::OptionType::Bool: ini.GetOrCreateSection(section)->Get(it.second.m_option_name, &it.second.m_bool_value, it.second.m_bool_value); break; - case ConfigurationOption::OptionType::OPTION_INTEGER: + case ConfigurationOption::OptionType::Integer: { std::string value; ini.GetOrCreateSection(section)->Get(it.second.m_option_name, &value); @@ -257,7 +257,7 @@ void PostProcessingConfiguration::LoadOptionsConfiguration() TryParseVector(value, &it.second.m_integer_values); } break; - case ConfigurationOption::OptionType::OPTION_FLOAT: + case ConfigurationOption::OptionType::Float: { std::string value; ini.GetOrCreateSection(section)->Get(it.second.m_option_name, &value); @@ -279,12 +279,12 @@ void PostProcessingConfiguration::SaveOptionsConfiguration() { switch (it.second.m_type) { - case ConfigurationOption::OptionType::OPTION_BOOL: + case ConfigurationOption::OptionType::Bool: { ini.GetOrCreateSection(section)->Set(it.second.m_option_name, it.second.m_bool_value); } break; - case ConfigurationOption::OptionType::OPTION_INTEGER: + case ConfigurationOption::OptionType::Integer: { std::string value; for (size_t i = 0; i < it.second.m_integer_values.size(); ++i) @@ -295,7 +295,7 @@ void PostProcessingConfiguration::SaveOptionsConfiguration() ini.GetOrCreateSection(section)->Set(it.second.m_option_name, value); } break; - case ConfigurationOption::OptionType::OPTION_FLOAT: + case ConfigurationOption::OptionType::Float: { std::ostringstream value; value.imbue(std::locale("C")); @@ -459,15 +459,14 @@ std::string PostProcessing::GetUniformBufferHeader() const // Custom options/uniforms for (const auto& it : m_config.GetOptions()) { - if (it.second.m_type == - PostProcessingConfiguration::ConfigurationOption::OptionType::OPTION_BOOL) + if (it.second.m_type == PostProcessingConfiguration::ConfigurationOption::OptionType::Bool) { ss << fmt::format(" int {};\n", it.first); for (u32 i = 0; i < 3; i++) ss << " int ubo_align_" << unused_counter++ << "_;\n"; } else if (it.second.m_type == - PostProcessingConfiguration::ConfigurationOption::OptionType::OPTION_INTEGER) + PostProcessingConfiguration::ConfigurationOption::OptionType::Integer) { u32 count = static_cast(it.second.m_integer_values.size()); if (count == 1) @@ -479,7 +478,7 @@ std::string PostProcessing::GetUniformBufferHeader() const ss << " int ubo_align_" << unused_counter++ << "_;\n"; } else if (it.second.m_type == - PostProcessingConfiguration::ConfigurationOption::OptionType::OPTION_FLOAT) + 
PostProcessingConfiguration::ConfigurationOption::OptionType::Float) { u32 count = static_cast(it.second.m_float_values.size()); if (count == 1) @@ -707,17 +706,17 @@ void PostProcessing::FillUniformBuffer(const MathUtil::Rectangle& src, switch (it.second.m_type) { - case PostProcessingConfiguration::ConfigurationOption::OptionType::OPTION_BOOL: + case PostProcessingConfiguration::ConfigurationOption::OptionType::Bool: value.as_bool[0] = it.second.m_bool_value ? 1 : 0; break; - case PostProcessingConfiguration::ConfigurationOption::OptionType::OPTION_INTEGER: + case PostProcessingConfiguration::ConfigurationOption::OptionType::Integer: ASSERT(it.second.m_integer_values.size() < 4); std::copy_n(it.second.m_integer_values.begin(), it.second.m_integer_values.size(), value.as_int); break; - case PostProcessingConfiguration::ConfigurationOption::OptionType::OPTION_FLOAT: + case PostProcessingConfiguration::ConfigurationOption::OptionType::Float: ASSERT(it.second.m_float_values.size() < 4); std::copy_n(it.second.m_float_values.begin(), it.second.m_float_values.size(), value.as_float); diff --git a/Source/Core/VideoCommon/PostProcessing.h b/Source/Core/VideoCommon/PostProcessing.h index aafee8f1f68e..c7f94535f5d1 100644 --- a/Source/Core/VideoCommon/PostProcessing.h +++ b/Source/Core/VideoCommon/PostProcessing.h @@ -24,11 +24,11 @@ class PostProcessingConfiguration public: struct ConfigurationOption { - enum OptionType + enum class OptionType { - OPTION_BOOL = 0, - OPTION_FLOAT, - OPTION_INTEGER, + Bool = 0, + Float, + Integer, }; bool m_bool_value = false; @@ -45,7 +45,7 @@ class PostProcessingConfiguration std::vector m_float_step_values; std::vector m_integer_step_values; - OptionType m_type = OptionType::OPTION_BOOL; + OptionType m_type = OptionType::Bool; std::string m_gui_name; std::string m_option_name; From 63181f04464c27802e7951ffc06604a206903496 Mon Sep 17 00:00:00 2001 From: Techjar Date: Fri, 18 Feb 2022 06:34:01 -0500 Subject: [PATCH 057/237] ShaderGenCommon: Add missing include --- Source/Core/VideoCommon/ShaderGenCommon.h | 1 + 1 file changed, 1 insertion(+) diff --git a/Source/Core/VideoCommon/ShaderGenCommon.h b/Source/Core/VideoCommon/ShaderGenCommon.h index edbd931db9fb..bec69fe7c0a2 100644 --- a/Source/Core/VideoCommon/ShaderGenCommon.h +++ b/Source/Core/VideoCommon/ShaderGenCommon.h @@ -4,6 +4,7 @@ #pragma once #include +#include #include #include #include From aff45c91fc0729595a2e40511404386726bfd205 Mon Sep 17 00:00:00 2001 From: JosJuice Date: Tue, 25 Jan 2022 20:34:03 +0100 Subject: [PATCH 058/237] Port Wiimote source settings to the new config system This lets us finally get rid of BootManager's ConfigCache! 
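For reference, a minimal sketch of how a caller now reads or changes a source through
the layered config system. The Info definitions and GetInfoForWiimoteSource are as in
the diff below; Config::Get and Config::SetBaseOrCurrent are the generic accessors of
the config system and are assumed here, not introduced by this patch.

    #include "Core/Config/WiimoteSettings.h"

    static void Example()
    {
      // Read the configured source for Wiimote slot 0 (slots 0-3; index 4 is the
      // Balance Board, as in GetInfoForWiimoteSource below).
      const WiimoteSource source = Config::Get(Config::GetInfoForWiimoteSource(0));

      // Change it. Game-INI overrides land in the GlobalGame/LocalGame layers, so
      // dropping those layers at shutdown restores the user's choice without the
      // old ConfigCache-style save/restore in BootManager.
      if (source == WiimoteSource::None)
        Config::SetBaseOrCurrent(Config::GetInfoForWiimoteSource(0), WiimoteSource::Emulated);
    }
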
--- Source/Android/jni/MainAndroid.cpp | 1 - Source/Core/Core/BootManager.cpp | 90 ------------------- Source/Core/Core/CMakeLists.txt | 2 + Source/Core/Core/Config/WiimoteSettings.cpp | 30 +++++++ Source/Core/Core/Config/WiimoteSettings.h | 20 +++++ .../Core/ConfigLoaders/GameConfigLoader.cpp | 6 ++ .../Core/ConfigLoaders/IsSettingSaveable.cpp | 9 ++ Source/Core/Core/HW/Wiimote.cpp | 33 ++++++- Source/Core/Core/HW/Wiimote.h | 5 -- .../Core/Core/HW/WiimoteReal/WiimoteReal.cpp | 47 ++++------ Source/Core/Core/Movie.cpp | 8 +- Source/Core/Core/NetPlayClient.cpp | 5 +- Source/Core/DolphinLib.props | 2 + .../Config/WiimoteControllersWidget.cpp | 17 ++-- Source/Core/DolphinQt/MainWindow.cpp | 5 +- Source/Core/UICommon/UICommon.cpp | 24 ----- Source/Core/UICommon/UICommon.h | 2 - 17 files changed, 134 insertions(+), 172 deletions(-) create mode 100644 Source/Core/Core/Config/WiimoteSettings.cpp create mode 100644 Source/Core/Core/Config/WiimoteSettings.h diff --git a/Source/Android/jni/MainAndroid.cpp b/Source/Android/jni/MainAndroid.cpp index b533ba74d641..5abd318b1fca 100644 --- a/Source/Android/jni/MainAndroid.cpp +++ b/Source/Android/jni/MainAndroid.cpp @@ -496,7 +496,6 @@ JNIEXPORT void JNICALL Java_org_dolphinemu_dolphinemu_NativeLibrary_RefreshWiimo JNIEXPORT void JNICALL Java_org_dolphinemu_dolphinemu_NativeLibrary_ReloadWiimoteConfig(JNIEnv*, jclass) { - WiimoteReal::LoadSettings(); Wiimote::LoadConfig(); } diff --git a/Source/Core/Core/BootManager.cpp b/Source/Core/Core/BootManager.cpp index 7ca13a77296a..755a0f92777d 100644 --- a/Source/Core/Core/BootManager.cpp +++ b/Source/Core/Core/BootManager.cpp @@ -53,58 +53,6 @@ namespace BootManager { -// TODO this is an ugly hack which allows us to restore values trampled by per-game settings -// Apply fire liberally -struct ConfigCache -{ -public: - // fill the cache with values from the configuration - void SaveConfig(const SConfig& config); - // restore values to the configuration from the cache - void RestoreConfig(SConfig* config); - - // These store if the relevant setting should be reset back later (true) or if it should be left - // alone on restore (false) - bool bSetVolume = false; - std::array bSetWiimoteSource{}; - -private: - bool valid = false; - std::array iWiimoteSource{}; -}; - -void ConfigCache::SaveConfig(const SConfig& config) -{ - valid = true; - - for (int i = 0; i != MAX_BBMOTES; ++i) - iWiimoteSource[i] = WiimoteCommon::GetSource(i); - - bSetVolume = false; - bSetWiimoteSource.fill(false); -} - -void ConfigCache::RestoreConfig(SConfig* config) -{ - if (!valid) - return; - - valid = false; - - // Only change these back if they were actually set by game ini, since they can be changed while a - // game is running. 
- if (config->bWii) - { - for (unsigned int i = 0; i < MAX_BBMOTES; ++i) - { - if (bSetWiimoteSource[i]) - WiimoteCommon::SetSource(i, iWiimoteSource[i]); - } - } -} - -static ConfigCache config_cache; - // Boot the ISO or file bool BootCore(std::unique_ptr boot, const WindowSystemInfo& wsi) { @@ -113,46 +61,9 @@ bool BootCore(std::unique_ptr boot, const WindowSystemInfo& wsi) SConfig& StartUp = SConfig::GetInstance(); - config_cache.SaveConfig(StartUp); - if (!StartUp.SetPathsAndGameMetadata(*boot)) return false; - // Load game specific settings - if (!std::holds_alternative(boot->parameters)) - { - IniFile game_ini = StartUp.LoadGameIni(); - - // General settings - IniFile::Section* controls_section = game_ini.GetOrCreateSection("Controls"); - - // Wii settings - if (StartUp.bWii) - { - int source; - for (unsigned int i = 0; i < MAX_WIIMOTES; ++i) - { - controls_section->Get(fmt::format("WiimoteSource{}", i), &source, -1); - if (source != -1 && WiimoteCommon::GetSource(i) != WiimoteSource(source) && - WiimoteSource(source) >= WiimoteSource::None && - WiimoteSource(source) <= WiimoteSource::Real) - { - config_cache.bSetWiimoteSource[i] = true; - WiimoteCommon::SetSource(i, WiimoteSource(source)); - } - } - controls_section->Get("WiimoteSourceBB", &source, -1); - if (source != -1 && - WiimoteCommon::GetSource(WIIMOTE_BALANCE_BOARD) != WiimoteSource(source) && - (WiimoteSource(source) == WiimoteSource::None || - WiimoteSource(source) == WiimoteSource::Real)) - { - config_cache.bSetWiimoteSource[WIIMOTE_BALANCE_BOARD] = true; - WiimoteCommon::SetSource(WIIMOTE_BALANCE_BOARD, WiimoteSource(source)); - } - } - } - // Movie settings if (Movie::IsPlayingInput() && Movie::IsConfigSaved()) { @@ -313,7 +224,6 @@ void RestoreConfig() Config::RemoveLayer(Config::LayerType::GlobalGame); Config::RemoveLayer(Config::LayerType::LocalGame); SConfig::GetInstance().ResetRunningGameMetadata(); - config_cache.RestoreConfig(&SConfig::GetInstance()); } } // namespace BootManager diff --git a/Source/Core/Core/CMakeLists.txt b/Source/Core/Core/CMakeLists.txt index c0608572df94..4063e0156221 100644 --- a/Source/Core/Core/CMakeLists.txt +++ b/Source/Core/Core/CMakeLists.txt @@ -36,6 +36,8 @@ add_library(core Config/SYSCONFSettings.h Config/UISettings.cpp Config/UISettings.h + Config/WiimoteSettings.cpp + Config/WiimoteSettings.h ConfigLoaders/BaseConfigLoader.cpp ConfigLoaders/BaseConfigLoader.h ConfigLoaders/GameConfigLoader.cpp diff --git a/Source/Core/Core/Config/WiimoteSettings.cpp b/Source/Core/Core/Config/WiimoteSettings.cpp new file mode 100644 index 000000000000..6a9fe8efb2b1 --- /dev/null +++ b/Source/Core/Core/Config/WiimoteSettings.cpp @@ -0,0 +1,30 @@ +// Copyright 2022 Dolphin Emulator Project +// SPDX-License-Identifier: GPL-2.0-or-later + +#include "Core/Config/WiimoteSettings.h" + +#include "Core/HW/Wiimote.h" + +namespace Config +{ +const Info WIIMOTE_1_SOURCE{{System::WiiPad, "Wiimote1", "Source"}, + WiimoteSource::Emulated}; +const Info WIIMOTE_2_SOURCE{{System::WiiPad, "Wiimote2", "Source"}, + WiimoteSource::None}; +const Info WIIMOTE_3_SOURCE{{System::WiiPad, "Wiimote3", "Source"}, + WiimoteSource::None}; +const Info WIIMOTE_4_SOURCE{{System::WiiPad, "Wiimote4", "Source"}, + WiimoteSource::None}; +const Info WIIMOTE_BB_SOURCE{{System::WiiPad, "BalanceBoard", "Source"}, + WiimoteSource::None}; + +const Info& GetInfoForWiimoteSource(int index) +{ + static const std::array*, 5> infos{ + &WIIMOTE_1_SOURCE, &WIIMOTE_2_SOURCE, &WIIMOTE_3_SOURCE, + &WIIMOTE_4_SOURCE, &WIIMOTE_BB_SOURCE, + }; + return 
*infos[index]; +} + +} // namespace Config diff --git a/Source/Core/Core/Config/WiimoteSettings.h b/Source/Core/Core/Config/WiimoteSettings.h new file mode 100644 index 000000000000..9e97c3572685 --- /dev/null +++ b/Source/Core/Core/Config/WiimoteSettings.h @@ -0,0 +1,20 @@ +// Copyright 2022 Dolphin Emulator Project +// SPDX-License-Identifier: GPL-2.0-or-later + +#pragma once + +#include "Common/Config/Config.h" + +enum class WiimoteSource; + +namespace Config +{ +extern const Info WIIMOTE_1_SOURCE; +extern const Info WIIMOTE_2_SOURCE; +extern const Info WIIMOTE_3_SOURCE; +extern const Info WIIMOTE_4_SOURCE; +extern const Info WIIMOTE_BB_SOURCE; + +const Info& GetInfoForWiimoteSource(int index); + +} // namespace Config diff --git a/Source/Core/Core/ConfigLoaders/GameConfigLoader.cpp b/Source/Core/Core/ConfigLoaders/GameConfigLoader.cpp index cf864b97b253..62fe3e596872 100644 --- a/Source/Core/Core/ConfigLoaders/GameConfigLoader.cpp +++ b/Source/Core/Core/ConfigLoaders/GameConfigLoader.cpp @@ -26,6 +26,7 @@ #include "Core/Config/MainSettings.h" #include "Core/Config/SYSCONFSettings.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigLoaders/IsSettingSaveable.h" namespace ConfigLoaders @@ -79,6 +80,11 @@ static const INIToLocationMap& GetINIToLocationMap() {{"Controls", "PadType1"}, {Config::GetInfoForSIDevice(1).GetLocation()}}, {{"Controls", "PadType2"}, {Config::GetInfoForSIDevice(2).GetLocation()}}, {{"Controls", "PadType3"}, {Config::GetInfoForSIDevice(3).GetLocation()}}, + {{"Controls", "WiimoteSource0"}, {Config::WIIMOTE_1_SOURCE.GetLocation()}}, + {{"Controls", "WiimoteSource1"}, {Config::WIIMOTE_2_SOURCE.GetLocation()}}, + {{"Controls", "WiimoteSource2"}, {Config::WIIMOTE_3_SOURCE.GetLocation()}}, + {{"Controls", "WiimoteSource3"}, {Config::WIIMOTE_4_SOURCE.GetLocation()}}, + {{"Controls", "WiimoteSourceBB"}, {Config::WIIMOTE_BB_SOURCE.GetLocation()}}, }; return ini_to_location; } diff --git a/Source/Core/Core/ConfigLoaders/IsSettingSaveable.cpp b/Source/Core/Core/ConfigLoaders/IsSettingSaveable.cpp index 18fa55f17639..2dd5a5697d0e 100644 --- a/Source/Core/Core/ConfigLoaders/IsSettingSaveable.cpp +++ b/Source/Core/Core/ConfigLoaders/IsSettingSaveable.cpp @@ -10,6 +10,7 @@ #include "Core/Config/GraphicsSettings.h" #include "Core/Config/MainSettings.h" #include "Core/Config/UISettings.h" +#include "Core/Config/WiimoteSettings.h" namespace ConfigLoaders { @@ -129,6 +130,14 @@ bool IsSettingSaveable(const Config::Location& config_location) // UI.General &Config::MAIN_USE_DISCORD_PRESENCE.GetLocation(), + + // Wiimote + + &Config::WIIMOTE_1_SOURCE.GetLocation(), + &Config::WIIMOTE_2_SOURCE.GetLocation(), + &Config::WIIMOTE_3_SOURCE.GetLocation(), + &Config::WIIMOTE_4_SOURCE.GetLocation(), + &Config::WIIMOTE_BB_SOURCE.GetLocation(), }; return std::any_of(begin(s_setting_saveable), end(s_setting_saveable), diff --git a/Source/Core/Core/HW/Wiimote.cpp b/Source/Core/Core/HW/Wiimote.cpp index 30d522b5643e..675e53334681 100644 --- a/Source/Core/Core/HW/Wiimote.cpp +++ b/Source/Core/Core/HW/Wiimote.cpp @@ -3,9 +3,13 @@ #include "Core/HW/Wiimote.h" +#include + #include "Common/ChunkFile.h" #include "Common/CommonTypes.h" +#include "Common/Config/Config.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigManager.h" #include "Core/Core.h" #include "Core/HW/WiimoteEmu/WiimoteEmu.h" @@ -23,16 +27,17 @@ // Limit the amount of wiimote connect requests, when a button is pressed in disconnected state static std::array s_last_connect_request_counter; -namespace 
WiimoteCommon +namespace { static std::array, MAX_BBMOTES> s_wiimote_sources; +static std::optional s_config_callback_id = std::nullopt; WiimoteSource GetSource(unsigned int index) { return s_wiimote_sources[index]; } -void SetSource(unsigned int index, WiimoteSource source) +void OnSourceChanged(unsigned int index, WiimoteSource source) { const WiimoteSource previous_source = s_wiimote_sources[index].exchange(source); @@ -44,9 +49,19 @@ void SetSource(unsigned int index, WiimoteSource source) WiimoteReal::HandleWiimoteSourceChange(index); - Core::RunAsCPUThread([index] { UpdateSource(index); }); + Core::RunAsCPUThread([index] { WiimoteCommon::UpdateSource(index); }); } +void RefreshConfig() +{ + for (int i = 0; i < MAX_BBMOTES; ++i) + OnSourceChanged(i, Config::Get(Config::GetInfoForWiimoteSource(i))); +} + +} // namespace + +namespace WiimoteCommon +{ void UpdateSource(unsigned int index) { const auto bluetooth = WiiUtils::GetBluetoothEmuDevice(); @@ -144,6 +159,12 @@ void Shutdown() s_config.ClearControllers(); WiimoteReal::Stop(); + + if (s_config_callback_id) + { + Config::RemoveConfigChangedCallback(*s_config_callback_id); + s_config_callback_id = std::nullopt; + } } void Initialize(InitializeMode init_mode) @@ -158,6 +179,10 @@ void Initialize(InitializeMode init_mode) LoadConfig(); + if (!s_config_callback_id) + s_config_callback_id = Config::AddConfigChangedCallback(RefreshConfig); + RefreshConfig(); + WiimoteReal::Initialize(init_mode); // Reload Wiimotes with our settings @@ -191,7 +216,7 @@ void DoState(PointerWrap& p) { for (int i = 0; i < MAX_BBMOTES; ++i) { - const WiimoteSource source = WiimoteCommon::GetSource(i); + const WiimoteSource source = GetSource(i); auto state_wiimote_source = u8(source); p.Do(state_wiimote_source); diff --git a/Source/Core/Core/HW/Wiimote.h b/Source/Core/Core/HW/Wiimote.h index 1df00539ce7e..ee59aec2e111 100644 --- a/Source/Core/Core/HW/Wiimote.h +++ b/Source/Core/Core/HW/Wiimote.h @@ -54,9 +54,6 @@ namespace WiimoteCommon { class HIDWiimote; -WiimoteSource GetSource(unsigned int index); -void SetSource(unsigned int index, WiimoteSource source); - // Used to reconnect WiimoteDevice instance to HID source. // Must be run from CPU thread. 
void UpdateSource(unsigned int index); @@ -108,6 +105,4 @@ void Resume(); void Pause(); void Refresh(); -void LoadSettings(); - } // namespace WiimoteReal diff --git a/Source/Core/Core/HW/WiimoteReal/WiimoteReal.cpp b/Source/Core/Core/HW/WiimoteReal/WiimoteReal.cpp index 4f2896ad49e7..6b3655177431 100644 --- a/Source/Core/Core/HW/WiimoteReal/WiimoteReal.cpp +++ b/Source/Core/Core/HW/WiimoteReal/WiimoteReal.cpp @@ -11,11 +11,14 @@ #include "Common/ChunkFile.h" #include "Common/CommonTypes.h" +#include "Common/Config/Config.h" #include "Common/FileUtil.h" #include "Common/IniFile.h" #include "Common/Swap.h" #include "Common/Thread.h" + #include "Core/Config/MainSettings.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigManager.h" #include "Core/Core.h" #include "Core/HW/Wiimote.h" @@ -25,6 +28,7 @@ #include "Core/HW/WiimoteReal/IOLinux.h" #include "Core/HW/WiimoteReal/IOWin.h" #include "Core/HW/WiimoteReal/IOhidapi.h" + #include "InputCommon/ControllerInterface/Wiimote/WiimoteController.h" #include "InputCommon/InputConfig.h" @@ -78,8 +82,11 @@ static void TryToFillWiimoteSlot(u32 index) { std::lock_guard lk(g_wiimotes_mutex); - if (g_wiimotes[index] || WiimoteCommon::GetSource(index) != WiimoteSource::Real) + if (g_wiimotes[index] || + Config::Get(Config::GetInfoForWiimoteSource(index)) != WiimoteSource::Real) + { return; + } // If the pool is empty, attempt to steal from ControllerInterface. if (s_wiimote_pool.empty()) @@ -531,9 +538,11 @@ static unsigned int CalculateWantedWiimotes() std::lock_guard lk(g_wiimotes_mutex); // Figure out how many real Wiimotes are required unsigned int wanted_wiimotes = 0; - for (unsigned int i = 0; i < MAX_WIIMOTES; ++i) - if (WiimoteCommon::GetSource(i) == WiimoteSource::Real && !g_wiimotes[i]) + for (int i = 0; i < MAX_WIIMOTES; ++i) + { + if (Config::Get(Config::GetInfoForWiimoteSource(i)) == WiimoteSource::Real && !g_wiimotes[i]) ++wanted_wiimotes; + } return wanted_wiimotes; } @@ -541,9 +550,11 @@ static unsigned int CalculateWantedBB() { std::lock_guard lk(g_wiimotes_mutex); unsigned int wanted_bb = 0; - if (WiimoteCommon::GetSource(WIIMOTE_BALANCE_BOARD) == WiimoteSource::Real && + if (Config::Get(Config::WIIMOTE_BB_SOURCE) == WiimoteSource::Real && !g_wiimotes[WIIMOTE_BALANCE_BOARD]) + { ++wanted_bb; + } return wanted_bb; } @@ -823,32 +834,6 @@ int Wiimote::GetIndex() const return m_index; } -void LoadSettings() -{ - std::string ini_filename = File::GetUserPath(D_CONFIG_IDX) + WIIMOTE_INI_NAME ".ini"; - - IniFile inifile; - inifile.Load(ini_filename); - - for (unsigned int i = 0; i < MAX_WIIMOTES; ++i) - { - std::string secname("Wiimote"); - secname += static_cast('1' + i); - IniFile::Section& sec = *inifile.GetOrCreateSection(secname); - - unsigned int source = 0; - sec.Get("Source", &source, i ? 
int(WiimoteSource::None) : int(WiimoteSource::Emulated)); - WiimoteCommon::SetSource(i, WiimoteSource(source)); - } - - std::string secname("BalanceBoard"); - IniFile::Section& sec = *inifile.GetOrCreateSection(secname); - - unsigned int bb_source = 0; - sec.Get("Source", &bb_source, int(WiimoteSource::None)); - WiimoteCommon::SetSource(WIIMOTE_BALANCE_BOARD, WiimoteSource(bb_source)); -} - // config dialog calls this when some settings change void Initialize(::Wiimote::InitializeMode init_mode) { @@ -924,7 +909,7 @@ void Pause() // Called from the Wiimote scanner thread (or UI thread on source change) static bool TryToConnectWiimoteToSlot(std::unique_ptr& wm, unsigned int i) { - if (WiimoteCommon::GetSource(i) != WiimoteSource::Real || g_wiimotes[i]) + if (Config::Get(Config::GetInfoForWiimoteSource(i)) != WiimoteSource::Real || g_wiimotes[i]) return false; if (!wm->Connect(i)) diff --git a/Source/Core/Core/Movie.cpp b/Source/Core/Core/Movie.cpp index 25b8f540188b..ec168b5c3616 100644 --- a/Source/Core/Core/Movie.cpp +++ b/Source/Core/Core/Movie.cpp @@ -36,6 +36,7 @@ #include "Core/Boot/Boot.h" #include "Core/Config/MainSettings.h" #include "Core/Config/SYSCONFSettings.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigLoaders/MovieConfigLoader.h" #include "Core/ConfigManager.h" #include "Core/Core.h" @@ -169,7 +170,7 @@ std::string GetInputDisplay() s_controllers[i] = ControllerType::GC; else s_controllers[i] = ControllerType::None; - s_wiimotes[i] = WiimoteCommon::GetSource(i) != WiimoteSource::None; + s_wiimotes[i] = Config::Get(Config::GetInfoForWiimoteSource(i)) != WiimoteSource::None; } } @@ -506,7 +507,7 @@ void ChangeWiiPads(bool instantly) for (int i = 0; i < MAX_WIIMOTES; ++i) { - wiimotes[i] = WiimoteCommon::GetSource(i) != WiimoteSource::None; + wiimotes[i] = Config::Get(Config::GetInfoForWiimoteSource(i)) != WiimoteSource::None; } // This is important for Wiimotes, because they can desync easily if they get re-activated @@ -518,7 +519,8 @@ void ChangeWiiPads(bool instantly) { const bool is_using_wiimote = IsUsingWiimote(i); - WiimoteCommon::SetSource(i, is_using_wiimote ? WiimoteSource::Emulated : WiimoteSource::None); + Config::SetCurrent(Config::GetInfoForWiimoteSource(i), + is_using_wiimote ? WiimoteSource::Emulated : WiimoteSource::None); if (bt != nullptr) bt->AccessWiimoteByIndex(i)->Activate(is_using_wiimote); } diff --git a/Source/Core/Core/NetPlayClient.cpp b/Source/Core/Core/NetPlayClient.cpp index 54aeb83e8312..2d60e0f77696 100644 --- a/Source/Core/Core/NetPlayClient.cpp +++ b/Source/Core/Core/NetPlayClient.cpp @@ -37,6 +37,7 @@ #include "Core/Config/MainSettings.h" #include "Core/Config/NetplaySettings.h" #include "Core/Config/SessionSettings.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigManager.h" #include "Core/GeckoCode.h" #include "Core/HW/EXI/EXI.h" @@ -1741,8 +1742,8 @@ bool NetPlayClient::StartGame(const std::string& path) for (unsigned int i = 0; i < 4; ++i) { - WiimoteCommon::SetSource(i, - m_wiimote_map[i] > 0 ? WiimoteSource::Emulated : WiimoteSource::None); + Config::SetCurrent(Config::GetInfoForWiimoteSource(i), + m_wiimote_map[i] > 0 ? 
WiimoteSource::Emulated : WiimoteSource::None); } // boot game diff --git a/Source/Core/DolphinLib.props b/Source/Core/DolphinLib.props index c0d465aafe17..79557b58c73e 100644 --- a/Source/Core/DolphinLib.props +++ b/Source/Core/DolphinLib.props @@ -176,6 +176,7 @@ + @@ -763,6 +764,7 @@ + diff --git a/Source/Core/DolphinQt/Config/WiimoteControllersWidget.cpp b/Source/Core/DolphinQt/Config/WiimoteControllersWidget.cpp index e5616da4bf3b..613455ead601 100644 --- a/Source/Core/DolphinQt/Config/WiimoteControllersWidget.cpp +++ b/Source/Core/DolphinQt/Config/WiimoteControllersWidget.cpp @@ -17,7 +17,10 @@ #include #include +#include "Common/Config/Config.h" + #include "Core/Config/MainSettings.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigManager.h" #include "Core/Core.h" #include "Core/HW/Wiimote.h" @@ -301,9 +304,9 @@ void WiimoteControllersWidget::LoadSettings() { for (size_t i = 0; i < m_wiimote_groups.size(); i++) { - m_wiimote_boxes[i]->setCurrentIndex(int(WiimoteCommon::GetSource(u32(i)))); + m_wiimote_boxes[i]->setCurrentIndex(int(Config::Get(Config::GetInfoForWiimoteSource(int(i))))); } - m_wiimote_real_balance_board->setChecked(WiimoteCommon::GetSource(WIIMOTE_BALANCE_BOARD) == + m_wiimote_real_balance_board->setChecked(Config::Get(Config::WIIMOTE_BB_SOURCE) == WiimoteSource::Real); m_wiimote_speaker_data->setChecked(Config::Get(Config::MAIN_WIIMOTE_ENABLE_SPEAKER)); m_wiimote_ciface->setChecked(Config::Get(Config::MAIN_CONNECT_WIIMOTES_FOR_CONTROLLER_INTERFACE)); @@ -328,17 +331,15 @@ void WiimoteControllersWidget::SaveSettings() Config::SetBaseOrCurrent(Config::MAIN_BLUETOOTH_PASSTHROUGH_ENABLED, m_wiimote_passthrough->isChecked()); - WiimoteCommon::SetSource(WIIMOTE_BALANCE_BOARD, m_wiimote_real_balance_board->isChecked() ? - WiimoteSource::Real : - WiimoteSource::None); + const WiimoteSource bb_source = + m_wiimote_real_balance_board->isChecked() ? 
WiimoteSource::Real : WiimoteSource::None; + Config::SetBaseOrCurrent(Config::WIIMOTE_BB_SOURCE, bb_source); for (size_t i = 0; i < m_wiimote_groups.size(); i++) { const int index = m_wiimote_boxes[i]->currentIndex(); - WiimoteCommon::SetSource(u32(i), WiimoteSource(index)); + Config::SetBaseOrCurrent(Config::GetInfoForWiimoteSource(int(i)), WiimoteSource(index)); } - UICommon::SaveWiimoteSources(); - SConfig::GetInstance().SaveSettings(); } diff --git a/Source/Core/DolphinQt/MainWindow.cpp b/Source/Core/DolphinQt/MainWindow.cpp index 7fc42cc44a52..9c7d4d37ae2c 100644 --- a/Source/Core/DolphinQt/MainWindow.cpp +++ b/Source/Core/DolphinQt/MainWindow.cpp @@ -39,6 +39,7 @@ #include "Core/CommonTitles.h" #include "Core/Config/MainSettings.h" #include "Core/Config/NetplaySettings.h" +#include "Core/Config/WiimoteSettings.h" #include "Core/ConfigManager.h" #include "Core/Core.h" #include "Core/FreeLookManager.h" @@ -1735,7 +1736,7 @@ void MainWindow::OnStartRecording() controllers[i] = Movie::ControllerType::GC; else controllers[i] = Movie::ControllerType::None; - wiimotes[i] = WiimoteCommon::GetSource(i) != WiimoteSource::None; + wiimotes[i] = Config::Get(Config::GetInfoForWiimoteSource(i)) != WiimoteSource::None; } if (Movie::BeginRecordingInput(controllers, wiimotes)) @@ -1795,7 +1796,7 @@ void MainWindow::ShowTASInput() for (int i = 0; i < num_wii_controllers; i++) { - if (WiimoteCommon::GetSource(i) == WiimoteSource::Emulated && + if (Config::Get(Config::GetInfoForWiimoteSource(i)) == WiimoteSource::Emulated && (!Core::IsRunning() || SConfig::GetInstance().bWii)) { m_wii_tas_input_windows[i]->show(); diff --git a/Source/Core/UICommon/UICommon.cpp b/Source/Core/UICommon/UICommon.cpp index a5f072125139..2843a5fa3a68 100644 --- a/Source/Core/UICommon/UICommon.cpp +++ b/Source/Core/UICommon/UICommon.cpp @@ -106,7 +106,6 @@ void Init() SConfig::Init(); Discord::Init(); Common::Log::LogManager::Init(); - WiimoteReal::LoadSettings(); GCAdapter::Init(); VideoBackendBase::ActivateBackend(Config::Get(Config::MAIN_GFX_BACKEND)); @@ -356,29 +355,6 @@ void SetUserDirectory(std::string custom_path) File::SetUserPath(D_USER_IDX, std::move(user_path)); } -void SaveWiimoteSources() -{ - std::string ini_filename = File::GetUserPath(D_CONFIG_IDX) + WIIMOTE_INI_NAME ".ini"; - - IniFile inifile; - inifile.Load(ini_filename); - - for (unsigned int i = 0; i < MAX_WIIMOTES; ++i) - { - std::string secname("Wiimote"); - secname += (char)('1' + i); - IniFile::Section& sec = *inifile.GetOrCreateSection(secname); - - sec.Set("Source", int(WiimoteCommon::GetSource(i))); - } - - std::string secname("BalanceBoard"); - IniFile::Section& sec = *inifile.GetOrCreateSection(secname); - sec.Set("Source", int(WiimoteCommon::GetSource(WIIMOTE_BALANCE_BOARD))); - - inifile.Save(ini_filename); -} - bool TriggerSTMPowerEvent() { const auto ios = IOS::HLE::GetIOS(); diff --git a/Source/Core/UICommon/UICommon.h b/Source/Core/UICommon/UICommon.h index ea1d44e599e4..6bdb11f541d3 100644 --- a/Source/Core/UICommon/UICommon.h +++ b/Source/Core/UICommon/UICommon.h @@ -27,8 +27,6 @@ void SetUserDirectory(std::string custom_path); bool TriggerSTMPowerEvent(); -void SaveWiimoteSources(); - // Return a pretty file size string from byte count. // e.g. 
1134278 -> "1.08 MiB" std::string FormatSize(u64 bytes, int decimals = 2); From 1b76171a27063d4e75f8055f901379a47d899d5d Mon Sep 17 00:00:00 2001 From: JosJuice Date: Fri, 18 Feb 2022 21:58:05 +0100 Subject: [PATCH 059/237] Android: Get rid of LegacyIntSetting The only settings that were using LegacyIntSetting are now in the new config system, so there's no reason to have LegacyIntSetting anymore. --- .../features/settings/model/IntSetting.java | 16 ++++- .../settings/model/LegacyIntSetting.java | 26 --------- .../ui/SettingsFragmentPresenter.java | 58 +++++-------------- .../features/settings/utils/SettingsFile.java | 4 -- .../app/src/main/res/values/strings.xml | 11 ---- Source/Android/jni/Config/NativeConfig.cpp | 4 ++ 6 files changed, 35 insertions(+), 84 deletions(-) delete mode 100644 Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyIntSetting.java diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/IntSetting.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/IntSetting.java index d28603cbef79..1871652ea351 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/IntSetting.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/IntSetting.java @@ -21,6 +21,10 @@ public enum IntSetting implements AbstractIntSetting MAIN_SLOT_A(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SlotA", 8), MAIN_SLOT_B(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SlotB", 255), MAIN_FALLBACK_REGION(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "FallbackRegion", 2), + MAIN_SI_DEVICE_0(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SIDevice0", 6), + MAIN_SI_DEVICE_1(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SIDevice1", 0), + MAIN_SI_DEVICE_2(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SIDevice2", 0), + MAIN_SI_DEVICE_3(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SIDevice3", 0), MAIN_AUDIO_VOLUME(Settings.FILE_DOLPHIN, Settings.SECTION_INI_DSP, "Volume", 100), @@ -60,13 +64,23 @@ public enum IntSetting implements AbstractIntSetting GFX_STEREO_CONVERGENCE_PERCENTAGE(Settings.FILE_GFX, Settings.SECTION_STEREOSCOPY, "StereoConvergencePercentage", 100), - LOGGER_VERBOSITY(Settings.FILE_LOGGER, Settings.SECTION_LOGGER_OPTIONS, "Verbosity", 1); + LOGGER_VERBOSITY(Settings.FILE_LOGGER, Settings.SECTION_LOGGER_OPTIONS, "Verbosity", 1), + + WIIMOTE_1_SOURCE(Settings.FILE_WIIMOTE, "Wiimote1", "Source", 1), + WIIMOTE_2_SOURCE(Settings.FILE_WIIMOTE, "Wiimote2", "Source", 0), + WIIMOTE_3_SOURCE(Settings.FILE_WIIMOTE, "Wiimote3", "Source", 0), + WIIMOTE_4_SOURCE(Settings.FILE_WIIMOTE, "Wiimote4", "Source", 0), + WIIMOTE_BB_SOURCE(Settings.FILE_WIIMOTE, "BalanceBoard", "Source", 0); private static final IntSetting[] NOT_RUNTIME_EDITABLE_ARRAY = new IntSetting[]{ MAIN_CPU_CORE, MAIN_GC_LANGUAGE, MAIN_SLOT_A, // Can actually be changed, but specific code is required MAIN_SLOT_B, // Can actually be changed, but specific code is required + MAIN_SI_DEVICE_0, // Can actually be changed, but specific code is required + MAIN_SI_DEVICE_1, // Can actually be changed, but specific code is required + MAIN_SI_DEVICE_2, // Can actually be changed, but specific code is required + MAIN_SI_DEVICE_3, // Can actually be changed, but specific code is required }; private static final Set NOT_RUNTIME_EDITABLE = diff --git 
a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyIntSetting.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyIntSetting.java deleted file mode 100644 index 0413f9a9a9dd..000000000000 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyIntSetting.java +++ /dev/null @@ -1,26 +0,0 @@ -// SPDX-License-Identifier: GPL-2.0-or-later - -package org.dolphinemu.dolphinemu.features.settings.model; - -public class LegacyIntSetting extends AbstractLegacySetting implements AbstractIntSetting -{ - private final int mDefaultValue; - - public LegacyIntSetting(String file, String section, String key, int defaultValue) - { - super(file, section, key); - mDefaultValue = defaultValue; - } - - @Override - public int getInt(Settings settings) - { - return settings.getSection(mFile, mSection).getInt(mKey, mDefaultValue); - } - - @Override - public void setInt(Settings settings, int newValue) - { - settings.getSection(mFile, mSection).setInt(mKey, newValue); - } -} diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java index b9c2c3f840ef..485dda2cf84e 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java @@ -17,7 +17,6 @@ import org.dolphinemu.dolphinemu.features.settings.model.FloatSetting; import org.dolphinemu.dolphinemu.features.settings.model.IntSetting; import org.dolphinemu.dolphinemu.features.settings.model.LegacyBooleanSetting; -import org.dolphinemu.dolphinemu.features.settings.model.LegacyIntSetting; import org.dolphinemu.dolphinemu.features.settings.model.LegacyStringSetting; import org.dolphinemu.dolphinemu.features.settings.model.PostProcessing; import org.dolphinemu.dolphinemu.features.settings.model.Settings; @@ -555,51 +554,26 @@ else if (defaultCpuCore == 4) // AArch64 private void addGcPadSettings(ArrayList sl) { - for (int i = 0; i < 4; i++) - { - // GameCube controller 1 is set to Emulated by default, all others disabled - int defaultValue = i == 0 ? 6 : 0; - - LegacyIntSetting gcPadSetting; - if (mGameID.equals("")) - { - gcPadSetting = new LegacyIntSetting(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, - SettingsFile.KEY_GCPAD_TYPE + i, defaultValue); - } - else - { - gcPadSetting = new LegacyIntSetting(Settings.GAME_SETTINGS_PLACEHOLDER_FILE_NAME, - Settings.SECTION_CONTROLS, SettingsFile.KEY_GCPAD_G_TYPE + i, defaultValue); - } - // TODO: This controller_0 + i business is quite the hack. It should work, but only if the definitions are kept together and in order. 
- sl.add(new SingleChoiceSetting(mContext, gcPadSetting, R.string.controller_0 + i, 0, - R.array.gcpadTypeEntries, R.array.gcpadTypeValues, MenuTag.getGCPadMenuTag(i))); - } + sl.add(new SingleChoiceSetting(mContext, IntSetting.MAIN_SI_DEVICE_0, R.string.controller_0, 0, + R.array.gcpadTypeEntries, R.array.gcpadTypeValues, MenuTag.getGCPadMenuTag(0))); + sl.add(new SingleChoiceSetting(mContext, IntSetting.MAIN_SI_DEVICE_1, R.string.controller_1, 0, + R.array.gcpadTypeEntries, R.array.gcpadTypeValues, MenuTag.getGCPadMenuTag(1))); + sl.add(new SingleChoiceSetting(mContext, IntSetting.MAIN_SI_DEVICE_2, R.string.controller_2, 0, + R.array.gcpadTypeEntries, R.array.gcpadTypeValues, MenuTag.getGCPadMenuTag(2))); + sl.add(new SingleChoiceSetting(mContext, IntSetting.MAIN_SI_DEVICE_3, R.string.controller_3, 0, + R.array.gcpadTypeEntries, R.array.gcpadTypeValues, MenuTag.getGCPadMenuTag(3))); } private void addWiimoteSettings(ArrayList sl) { - for (int i = 0; i < 4; i++) - { - // Wii Remote 1 is set to Emulated by default, all others disabled - int defaultValue = i == 0 ? 1 : 0; - - LegacyIntSetting wiimoteSetting; - if (mGameID.equals("")) - { - wiimoteSetting = new LegacyIntSetting(Settings.FILE_WIIMOTE, - Settings.SECTION_WIIMOTE + (i + 1), SettingsFile.KEY_WIIMOTE_TYPE, defaultValue); - } - else - { - wiimoteSetting = new LegacyIntSetting(Settings.GAME_SETTINGS_PLACEHOLDER_FILE_NAME, - Settings.SECTION_CONTROLS, SettingsFile.KEY_WIIMOTE_G_TYPE + i, defaultValue); - } - // TODO: This wiimote_0 + i business is quite the hack. It should work, but only if the definitions are kept together and in order. - sl.add(new SingleChoiceSetting(mContext, wiimoteSetting, R.string.wiimote_4 + i, 0, - R.array.wiimoteTypeEntries, R.array.wiimoteTypeValues, - MenuTag.getWiimoteMenuTag(i + 4))); - } + sl.add(new SingleChoiceSetting(mContext, IntSetting.WIIMOTE_1_SOURCE, R.string.wiimote_4, 0, + R.array.wiimoteTypeEntries, R.array.wiimoteTypeValues, MenuTag.getWiimoteMenuTag(4))); + sl.add(new SingleChoiceSetting(mContext, IntSetting.WIIMOTE_2_SOURCE, R.string.wiimote_5, 0, + R.array.wiimoteTypeEntries, R.array.wiimoteTypeValues, MenuTag.getWiimoteMenuTag(5))); + sl.add(new SingleChoiceSetting(mContext, IntSetting.WIIMOTE_3_SOURCE, R.string.wiimote_6, 0, + R.array.wiimoteTypeEntries, R.array.wiimoteTypeValues, MenuTag.getWiimoteMenuTag(6))); + sl.add(new SingleChoiceSetting(mContext, IntSetting.WIIMOTE_4_SOURCE, R.string.wiimote_7, 0, + R.array.wiimoteTypeEntries, R.array.wiimoteTypeValues, MenuTag.getWiimoteMenuTag(7))); } private void addGraphicsSettings(ArrayList sl) diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java index dfa78e6eb2ae..d8dc74d054a8 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java @@ -20,9 +20,7 @@ public final class SettingsFile public static final String KEY_ISO_PATH_BASE = "ISOPath"; public static final String KEY_ISO_PATHS = "ISOPaths"; - public static final String KEY_GCPAD_TYPE = "SIDevice"; public static final String KEY_GCPAD_PLAYER_1 = "SIDevice0"; - public static final String KEY_GCPAD_G_TYPE = "PadType"; public static final String KEY_GCBIND_A = "InputA_"; public static final String KEY_GCBIND_B = "InputB_"; @@ -50,11 +48,9 @@ public final class 
SettingsFile public static final String KEY_EMU_RUMBLE = "EmuRumble"; - public static final String KEY_WIIMOTE_TYPE = "Source"; public static final String KEY_WIIMOTE_EXTENSION = "Extension"; // Controller keys for game specific settings - public static final String KEY_WIIMOTE_G_TYPE = "WiimoteSource"; public static final String KEY_WIIMOTE_PROFILE = "WiimoteProfile"; public static final String KEY_WIIBIND_A = "WiimoteA_"; diff --git a/Source/Android/app/src/main/res/values/strings.xml b/Source/Android/app/src/main/res/values/strings.xml index c403f5376817..e87eed98653d 100644 --- a/Source/Android/app/src/main/res/values/strings.xml +++ b/Source/Android/app/src/main/res/values/strings.xml @@ -6,13 +6,10 @@ app dolphinemu - GameCube Controller 1 GameCube Controller 2 GameCube Controller 3 GameCube Controller 4 - Control Stick C Stick @@ -22,23 +19,15 @@ Analog Radius (High value = High sensitivity) Analog Threshold (Low value = High sensitivity) - Wii Remote 1 Wii Remote 2 Wii Remote 3 Wii Remote 4 - - Wii Remote Extension 1 Wii Remote Extension 2 Wii Remote Extension 3 Wii Remote Extension 4 - Extension Choose and bind the Wii Remote extension. diff --git a/Source/Android/jni/Config/NativeConfig.cpp b/Source/Android/jni/Config/NativeConfig.cpp index bdeafc5f13d4..06b4bf507f3f 100644 --- a/Source/Android/jni/Config/NativeConfig.cpp +++ b/Source/Android/jni/Config/NativeConfig.cpp @@ -38,6 +38,10 @@ static Config::Location GetLocation(JNIEnv* env, jstring file, jstring section, { system = Config::System::Logger; } + else if (decoded_file == "WiimoteNew") + { + system = Config::System::WiiPad; + } else { ASSERT(false); From d20f3c9e64cb70251d9375cd1483b959d1248ac8 Mon Sep 17 00:00:00 2001 From: Dentomologist Date: Sat, 19 Feb 2022 08:46:03 -0800 Subject: [PATCH 060/237] Debugger: Fix warning on Debian builder Fix "braces around scalar initializer [-Wbraced-scalar-init]" warning --- Source/Core/Core/Debugger/PPCDebugInterface.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/Core/Core/Debugger/PPCDebugInterface.cpp b/Source/Core/Core/Debugger/PPCDebugInterface.cpp index 2d6e83c7cc9e..42082a5cfb41 100644 --- a/Source/Core/Core/Debugger/PPCDebugInterface.cpp +++ b/Source/Core/Core/Debugger/PPCDebugInterface.cpp @@ -182,7 +182,7 @@ Common::Debug::Threads PPCDebugInterface::GetThreads() const if (!active_thread->IsValid()) return threads; - std::vector visited_addrs{{active_thread->GetAddress()}}; + std::vector visited_addrs{active_thread->GetAddress()}; const auto insert_threads = [&threads, &visited_addrs](u32 addr, auto get_next_addr) { while (addr != 0 && PowerPC::HostIsRAMAddress(addr)) { From 90c576e075f9a0901236429abc9a918983da859a Mon Sep 17 00:00:00 2001 From: JosJuice Date: Tue, 25 Jan 2022 22:04:58 +0100 Subject: [PATCH 061/237] Use config changed callback to detect SD insertion/ejection This saves the GUI from having to manually call SDIO_EventNotify. With that out of the way, we can let users change the "Insert SD Card" setting on Android while a game is running. 
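(Reviewer note, illustrative only and not part of the diff: with the callback
wiring added below, the frontend side reduces to a plain config write. The
wrapper function name is invented for the example.)

    #include "Common/Config/Config.h"
    #include "Core/Config/MainSettings.h"

    // Toggling the virtual SD card is now just a config write. The
    // config-changed callback in SDIOSlot0Device compares the new value with
    // its cached m_sd_card_inserted and raises the insert/remove event
    // itself, so no explicit SDIO_EventNotify call is required.
    void SetSDCardInserted(bool inserted)
    {
      Config::SetBaseOrCurrent(Config::MAIN_WII_SD_CARD, inserted);
    }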
--- .../settings/model/BooleanSetting.java | 1 - Source/Core/Core/IOS/IOS.cpp | 19 ---------- Source/Core/Core/IOS/IOS.h | 2 -- Source/Core/Core/IOS/SDIO/SDIOSlot0.cpp | 35 +++++++++++++++++-- Source/Core/Core/IOS/SDIO/SDIOSlot0.h | 10 ++++-- Source/Core/DolphinQt/Settings.cpp | 4 --- 6 files changed, 40 insertions(+), 31 deletions(-) diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java index a7c47a03ada2..a2b380e989f4 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java @@ -228,7 +228,6 @@ public enum BooleanSetting implements AbstractBooleanSetting MAIN_CPU_THREAD, MAIN_ENABLE_CHEATS, MAIN_OVERRIDE_REGION_SETTINGS, - MAIN_WII_SD_CARD, // Can actually be changed, but specialized code is required MAIN_MMU, MAIN_DSP_JIT, }; diff --git a/Source/Core/Core/IOS/IOS.cpp b/Source/Core/Core/IOS/IOS.cpp index 9465cbe91850..364e64783906 100644 --- a/Source/Core/Core/IOS/IOS.cpp +++ b/Source/Core/Core/IOS/IOS.cpp @@ -63,7 +63,6 @@ static std::unique_ptr s_ios; constexpr u64 ENQUEUE_REQUEST_FLAG = 0x100000000ULL; static CoreTiming::EventType* s_event_enqueue; -static CoreTiming::EventType* s_event_sdio_notify; static CoreTiming::EventType* s_event_finish_ppc_bootstrap; static CoreTiming::EventType* s_event_finish_ios_boot; @@ -789,14 +788,6 @@ void Kernel::UpdateWantDeterminism(const bool new_want_determinism) device.second->UpdateWantDeterminism(new_want_determinism); } -void Kernel::SDIO_EventNotify() -{ - // TODO: Potential race condition: If IsRunning() becomes false after - // it's checked, an event may be scheduled after CoreTiming shuts down. 
- if (SConfig::GetInstance().bWii && Core::IsRunning()) - CoreTiming::ScheduleEvent(0, s_event_sdio_notify, 0, CoreTiming::FromThread::NON_CPU); -} - void Kernel::DoState(PointerWrap& p) { p.Do(m_request_queue); @@ -886,16 +877,6 @@ void Init() s_ios->HandleIPCEvent(userdata); }); - s_event_sdio_notify = CoreTiming::RegisterEvent("SDIO_EventNotify", [](u64, s64) { - if (!s_ios) - return; - - auto sdio_slot0 = s_ios->GetDeviceByName("/dev/sdio/slot0"); - auto device = static_cast(sdio_slot0.get()); - if (device) - device->EventNotify(); - }); - ESDevice::InitializeEmulationState(); s_event_finish_ppc_bootstrap = diff --git a/Source/Core/Core/IOS/IOS.h b/Source/Core/Core/IOS/IOS.h index 14254fa9cfea..773a5d4d64d5 100644 --- a/Source/Core/Core/IOS/IOS.h +++ b/Source/Core/Core/IOS/IOS.h @@ -125,8 +125,6 @@ class Kernel std::shared_ptr GetFSDevice(); std::shared_ptr GetES(); - void SDIO_EventNotify(); - void EnqueueIPCRequest(u32 address); void EnqueueIPCReply(const Request& request, s32 return_value, s64 cycles_in_future = 0, CoreTiming::FromThread from = CoreTiming::FromThread::CPU); diff --git a/Source/Core/Core/IOS/SDIO/SDIOSlot0.cpp b/Source/Core/Core/IOS/SDIO/SDIOSlot0.cpp index 4ea206e6ce8e..8cb17f32671e 100644 --- a/Source/Core/Core/IOS/SDIO/SDIOSlot0.cpp +++ b/Source/Core/Core/IOS/SDIO/SDIOSlot0.cpp @@ -14,8 +14,10 @@ #include "Common/IOFile.h" #include "Common/Logging/Log.h" #include "Common/SDCardUtil.h" + #include "Core/Config/MainSettings.h" #include "Core/Config/SessionSettings.h" +#include "Core/Core.h" #include "Core/HW/Memmap.h" #include "Core/IOS/IOS.h" #include "Core/IOS/VersionInfo.h" @@ -27,6 +29,29 @@ SDIOSlot0Device::SDIOSlot0Device(Kernel& ios, const std::string& device_name) { if (!Config::Get(Config::MAIN_ALLOW_SD_WRITES)) INFO_LOG_FMT(IOS_SD, "Writes to SD card disabled by user"); + + m_config_callback_id = Config::AddConfigChangedCallback([this] { RefreshConfig(); }); + m_sd_card_inserted = Config::Get(Config::MAIN_WII_SD_CARD); +} + +SDIOSlot0Device::~SDIOSlot0Device() +{ + Config::RemoveConfigChangedCallback(m_config_callback_id); +} + +void SDIOSlot0Device::RefreshConfig() +{ + if (m_sd_card_inserted != Config::Get(Config::MAIN_WII_SD_CARD)) + { + Core::RunAsCPUThread([this] { + const bool sd_card_inserted = Config::Get(Config::MAIN_WII_SD_CARD); + if (m_sd_card_inserted != sd_card_inserted) + { + m_sd_card_inserted = sd_card_inserted; + EventNotify(); + } + }); + } } void SDIOSlot0Device::DoState(PointerWrap& p) @@ -49,10 +74,14 @@ void SDIOSlot0Device::EventNotify() if (!m_event) return; - const bool sd_card_inserted = Config::Get(Config::MAIN_WII_SD_CARD); - if ((sd_card_inserted && m_event->type == EVENT_INSERT) || - (!sd_card_inserted && m_event->type == EVENT_REMOVE)) + if ((m_sd_card_inserted && m_event->type == EVENT_INSERT) || + (!m_sd_card_inserted && m_event->type == EVENT_REMOVE)) { + if (m_sd_card_inserted) + INFO_LOG_FMT(IOS_SD, "Notifying PPC of SD card insertion"); + else + INFO_LOG_FMT(IOS_SD, "Notifying PPC of SD card removal"); + m_ios.EnqueueIPCReply(m_event->request, m_event->type); m_event.reset(); } diff --git a/Source/Core/Core/IOS/SDIO/SDIOSlot0.h b/Source/Core/Core/IOS/SDIO/SDIOSlot0.h index efa6595c781e..fcdabd372e76 100644 --- a/Source/Core/Core/IOS/SDIO/SDIOSlot0.h +++ b/Source/Core/Core/IOS/SDIO/SDIOSlot0.h @@ -22,6 +22,7 @@ class SDIOSlot0Device : public Device { public: SDIOSlot0Device(Kernel& ios, const std::string& device_name); + ~SDIOSlot0Device() override; void DoState(PointerWrap& p) override; @@ -30,8 +31,6 @@ class 
SDIOSlot0Device : public Device std::optional IOCtl(const IOCtlRequest& request) override; std::optional IOCtlV(const IOCtlVRequest& request) override; - void EventNotify(); - private: // SD Host Controller Registers enum @@ -124,6 +123,10 @@ class SDIOSlot0Device : public Device Request request; }; + void RefreshConfig(); + + void EventNotify(); + IPCReply WriteHCRegister(const IOCtlRequest& request); IPCReply ReadHCRegister(const IOCtlRequest& request); IPCReply ResetCard(const IOCtlRequest& request); @@ -162,5 +165,8 @@ class SDIOSlot0Device : public Device std::array m_registers{}; File::IOFile m_card; + + size_t m_config_callback_id; + bool m_sd_card_inserted = false; }; } // namespace IOS::HLE diff --git a/Source/Core/DolphinQt/Settings.cpp b/Source/Core/DolphinQt/Settings.cpp index d22dcc6e256a..61e378f7364d 100644 --- a/Source/Core/DolphinQt/Settings.cpp +++ b/Source/Core/DolphinQt/Settings.cpp @@ -705,10 +705,6 @@ void Settings::SetSDCardInserted(bool inserted) { Config::SetBaseOrCurrent(Config::MAIN_WII_SD_CARD, inserted); emit SDCardInsertionChanged(inserted); - - auto* ios = IOS::HLE::GetIOS(); - if (ios) - ios->SDIO_EventNotify(); } } From 2273742f9e073e7e9b3b9c02bc1994549523ae5a Mon Sep 17 00:00:00 2001 From: JosJuice Date: Sun, 20 Feb 2022 13:48:06 +0100 Subject: [PATCH 062/237] Android: Get rid of LegacyBooleanSetting --- .../settings/model/BooleanSetting.java | 20 ++++++++++++++ .../settings/model/LegacyBooleanSetting.java | 26 ------------------- .../ui/SettingsFragmentPresenter.java | 14 +++------- .../features/settings/utils/SettingsFile.java | 3 --- 4 files changed, 24 insertions(+), 39 deletions(-) delete mode 100644 Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyBooleanSetting.java diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java index a7c47a03ada2..35130eaeccfb 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.java @@ -18,6 +18,14 @@ public enum BooleanSetting implements AbstractBooleanSetting MAIN_OVERRIDE_REGION_SETTINGS(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "OverrideRegionSettings", false), MAIN_AUDIO_STRETCH(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "AudioStretch", false), + MAIN_ADAPTER_RUMBLE_0(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "AdapterRumble0", true), + MAIN_ADAPTER_RUMBLE_1(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "AdapterRumble1", true), + MAIN_ADAPTER_RUMBLE_2(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "AdapterRumble2", true), + MAIN_ADAPTER_RUMBLE_3(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "AdapterRumble3", true), + MAIN_SIMULATE_KONGA_0(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SimulateKonga0", false), + MAIN_SIMULATE_KONGA_1(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SimulateKonga1", false), + MAIN_SIMULATE_KONGA_2(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SimulateKonga2", false), + MAIN_SIMULATE_KONGA_3(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SimulateKonga3", false), MAIN_WII_SD_CARD(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "WiiSDCard", true), MAIN_WIIMOTE_CONTINUOUS_SCANNING(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "WiimoteContinuousScanning", false), @@ -322,4 
+330,16 @@ public void setBooleanGlobal(int layer, boolean newValue) { NativeConfig.setBoolean(layer, mFile, mSection, mKey, newValue); } + + public static BooleanSetting getSettingForAdapterRumble(int channel) + { + return new BooleanSetting[]{MAIN_ADAPTER_RUMBLE_0, MAIN_ADAPTER_RUMBLE_1, MAIN_ADAPTER_RUMBLE_2, + MAIN_ADAPTER_RUMBLE_3}[channel]; + } + + public static BooleanSetting getSettingForSimulateKonga(int channel) + { + return new BooleanSetting[]{MAIN_SIMULATE_KONGA_0, MAIN_SIMULATE_KONGA_1, MAIN_SIMULATE_KONGA_2, + MAIN_SIMULATE_KONGA_3}[channel]; + } } diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyBooleanSetting.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyBooleanSetting.java deleted file mode 100644 index d754871445f4..000000000000 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/LegacyBooleanSetting.java +++ /dev/null @@ -1,26 +0,0 @@ -// SPDX-License-Identifier: GPL-2.0-or-later - -package org.dolphinemu.dolphinemu.features.settings.model; - -public class LegacyBooleanSetting extends AbstractLegacySetting implements AbstractBooleanSetting -{ - private final boolean mDefaultValue; - - public LegacyBooleanSetting(String file, String section, String key, boolean defaultValue) - { - super(file, section, key); - mDefaultValue = defaultValue; - } - - @Override - public boolean getBoolean(Settings settings) - { - return settings.getSection(mFile, mSection).getBoolean(mKey, mDefaultValue); - } - - @Override - public void setBoolean(Settings settings, boolean newValue) - { - settings.getSection(mFile, mSection).setBoolean(mKey, newValue); - } -} diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java index 485dda2cf84e..58869a9e95d5 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.java @@ -16,7 +16,6 @@ import org.dolphinemu.dolphinemu.features.settings.model.BooleanSetting; import org.dolphinemu.dolphinemu.features.settings.model.FloatSetting; import org.dolphinemu.dolphinemu.features.settings.model.IntSetting; -import org.dolphinemu.dolphinemu.features.settings.model.LegacyBooleanSetting; import org.dolphinemu.dolphinemu.features.settings.model.LegacyStringSetting; import org.dolphinemu.dolphinemu.features.settings.model.PostProcessing; import org.dolphinemu.dolphinemu.features.settings.model.Settings; @@ -865,15 +864,10 @@ private void addGcPadSubSettings(ArrayList sl, int gcPadNumber, in } else if (gcPadType == 12) // Adapter { - LegacyBooleanSetting rumble = new LegacyBooleanSetting(Settings.FILE_DOLPHIN, - Settings.SECTION_INI_CORE, SettingsFile.KEY_GCADAPTER_RUMBLE + gcPadNumber, false); - LegacyBooleanSetting bongo = new LegacyBooleanSetting(Settings.FILE_DOLPHIN, - Settings.SECTION_INI_CORE, SettingsFile.KEY_GCADAPTER_BONGOS + gcPadNumber, false); - - sl.add(new CheckBoxSetting(mContext, rumble, R.string.gc_adapter_rumble, - R.string.gc_adapter_rumble_description)); - sl.add(new CheckBoxSetting(mContext, bongo, R.string.gc_adapter_bongos, - R.string.gc_adapter_bongos_description)); + sl.add(new CheckBoxSetting(mContext, 
BooleanSetting.getSettingForAdapterRumble(gcPadNumber), + R.string.gc_adapter_rumble, R.string.gc_adapter_rumble_description)); + sl.add(new CheckBoxSetting(mContext, BooleanSetting.getSettingForSimulateKonga(gcPadNumber), + R.string.gc_adapter_bongos, R.string.gc_adapter_bongos_description)); } } diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java index d8dc74d054a8..87f58b4d02e1 100644 --- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java +++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/utils/SettingsFile.java @@ -43,9 +43,6 @@ public final class SettingsFile public static final String KEY_GCBIND_DPAD_LEFT = "DPadLeft_"; public static final String KEY_GCBIND_DPAD_RIGHT = "DPadRight_"; - public static final String KEY_GCADAPTER_RUMBLE = "AdapterRumble"; - public static final String KEY_GCADAPTER_BONGOS = "SimulateKonga"; - public static final String KEY_EMU_RUMBLE = "EmuRumble"; public static final String KEY_WIIMOTE_EXTENSION = "Extension"; From 23cbd570a1c58c7d24c83ae7e3788611c38717dc Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Mon, 21 Feb 2022 12:42:51 -0800 Subject: [PATCH 063/237] FramebufferManager: Flush pending EFB pokes in PopulateEFBCache I.e. flush pokes before running an EFB peek, if the cache tile isn't present. If the cache tile is present, then EFB pokes should have been written to the cache tile and thus don't need to be flushed. --- Source/Core/VideoCommon/FramebufferManager.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/Source/Core/VideoCommon/FramebufferManager.cpp b/Source/Core/VideoCommon/FramebufferManager.cpp index b805de77a84b..c6cb2fd29ed1 100644 --- a/Source/Core/VideoCommon/FramebufferManager.cpp +++ b/Source/Core/VideoCommon/FramebufferManager.cpp @@ -593,6 +593,7 @@ void FramebufferManager::DestroyReadbackFramebuffer() void FramebufferManager::PopulateEFBCache(bool depth, u32 tile_index) { + FlushEFBPokes(); g_vertex_manager->OnCPUEFBAccess(); // Force the path through the intermediate texture, as we can't do an image copy from a depth From 281dfd3e6a5503a3b06537440870802aaf6bf298 Mon Sep 17 00:00:00 2001 From: Pierre Bourdon Date: Wed, 23 Feb 2022 18:39:28 +0100 Subject: [PATCH 064/237] Fix manual update check which was hardcoded to "dev" track --- Source/Core/DolphinQt/MenuBar.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Source/Core/DolphinQt/MenuBar.cpp b/Source/Core/DolphinQt/MenuBar.cpp index 0c08dd3390d6..7eb5e4a540b5 100644 --- a/Source/Core/DolphinQt/MenuBar.cpp +++ b/Source/Core/DolphinQt/MenuBar.cpp @@ -549,7 +549,8 @@ void MenuBar::AddOptionsMenu() void MenuBar::InstallUpdateManually() { auto* updater = - new Updater(this->parentWidget(), "dev", Config::Get(Config::MAIN_AUTOUPDATE_HASH_OVERRIDE)); + new Updater(this->parentWidget(), Config::Get(Config::MAIN_AUTOUPDATE_UPDATE_TRACK), + Config::Get(Config::MAIN_AUTOUPDATE_HASH_OVERRIDE)); if (!updater->CheckForUpdate()) { From 7d76eea4ea0aeeba84f0e9501f16d043965ecab5 Mon Sep 17 00:00:00 2001 From: Pokechu22 Date: Wed, 23 Feb 2022 18:30:02 -0800 Subject: [PATCH 065/237] OGLRender: Log video backend info, in addition to showing it via OSD This is mainly intended for debugging fifo.ci. 
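(Reviewer note, not part of the diff: as I read it, fifo.ci captures Dolphin's
log output but not the on-screen OSD, so routing the same information through
the logger makes it greppable after a run. The added call is simply)

    INFO_LOG_FMT(VIDEO, "Video Info: {}, {}, {}", g_ogl_config.gl_vendor,
                 g_ogl_config.gl_renderer, g_ogl_config.gl_version);

(one "Video Info: <vendor>, <renderer>, <version>" line per renderer
initialization, next to the existing "Missing OGL Extensions" warning.)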
--- Source/Core/VideoBackends/OGL/OGLRender.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Source/Core/VideoBackends/OGL/OGLRender.cpp b/Source/Core/VideoBackends/OGL/OGLRender.cpp index 123b11b2d649..fae0d7a0ad10 100644 --- a/Source/Core/VideoBackends/OGL/OGLRender.cpp +++ b/Source/Core/VideoBackends/OGL/OGLRender.cpp @@ -751,6 +751,8 @@ Renderer::Renderer(std::unique_ptr main_gl_context, float backbuffer_ OSD::AddMessage("This device's performance may be poor.", 60000); } + INFO_LOG_FMT(VIDEO, "Video Info: {}, {}, {}", g_ogl_config.gl_vendor, g_ogl_config.gl_renderer, + g_ogl_config.gl_version); WARN_LOG_FMT(VIDEO, "Missing OGL Extensions: {}{}{}{}{}{}{}{}{}{}{}{}{}{}", g_ActiveConfig.backend_info.bSupportsDualSourceBlend ? "" : "DualSourceBlend ", g_ActiveConfig.backend_info.bSupportsPrimitiveRestart ? "" : "PrimitiveRestart ", From b641dc3d3454a2f88e09f165f09e1be11eb68bdd Mon Sep 17 00:00:00 2001 From: Nora Date: Thu, 24 Feb 2022 21:00:16 -0500 Subject: [PATCH 066/237] Add files via upload --- Data/Sys/GameSettings/GMPE01.ini | 22 ++++++++++++++++++++++ Data/Sys/GameSettings/GP5E01.ini | 10 ++++++++++ Data/Sys/GameSettings/GP6E01.ini | 5 +++++ Data/Sys/GameSettings/GP7E01.ini | 12 ++++++++++++ 4 files changed, 49 insertions(+) diff --git a/Data/Sys/GameSettings/GMPE01.ini b/Data/Sys/GameSettings/GMPE01.ini index 1cd20fde2208..375891191f06 100644 --- a/Data/Sys/GameSettings/GMPE01.ini +++ b/Data/Sys/GameSettings/GMPE01.ini @@ -117,12 +117,19 @@ c2086774 00000002 c2073c7c 00000002 735a0020 281a0020 60000000 00000000 +04005E90 60000000 +04005EA8 60000000 *Fixes a bug where only analog inputs work while using the triggers. $QOL - Automatically Advance Text Boxes [gamemasterplc] 04044A90 60000000 *Automatically scrolls the text boxes without pushing A +$QOL - Disable Advance on Results [gamemasterplc] +20446468 4182FF60 +04446468 4BFFFF60 +E2000001 80008000 + $QOL - Faster Boot Time [Ralf] 04056168 38607FFF *Automatically advance through the initial cutscences. @@ -429,3 +436,18 @@ $Minigame: Take a Breather - Mash Only L [gamemasterplc] 044207f0 38600000 e2000001 80008000 *Mash just L in the minigame Take a Breather. + +$Minigame Replacement - Bowser's Bigger Blast ➜ Chain Chomp Fever [Nora] +2818fd2c 00000027 +0218fd2c 00000025 +e2000001 80008000 + +$Minigame Replacement - Dungeon Duos ➜ The Great Deflate [Nora] +2818fd2c 0000001F +0218fd2c 00000018 +e2000001 80008000 + +$Minigame Replacement - Three Throw ➜ Mr. 
Blizzard's Brigade [Nora] +2818fd2c 00000009 +0218fd2c 0000000B +e2000001 80008000 \ No newline at end of file diff --git a/Data/Sys/GameSettings/GP5E01.ini b/Data/Sys/GameSettings/GP5E01.ini index 39f95bb06861..256ae9d2b82d 100644 --- a/Data/Sys/GameSettings/GP5E01.ini +++ b/Data/Sys/GameSettings/GP5E01.ini @@ -474,6 +474,11 @@ A81F019C 7C001838 E0000000 80008000 *Automatically scrolls the text boxes without pushing A +$QOL - Disable Advance on Results [gamemasterplc] +20478F28 4182F754 +04478F1C 48000010 +E2000001 80008000 + $QOL - Faster Boot Time [gamemasterplc] F6000001 80008180 38610030 38800002 @@ -858,6 +863,11 @@ $Minigame Replacement - Scaldin' Cauldron ➜ Rain of Fire [gamemasterplc] 0222A4C4 0000003A E2000001 80008000 +$Minigame Replacement - Submarathon ➜ Defuse or Lose +2822a4c4 0000002F +0222a4c4 00000028 +e2000001 80008000 + $Minigame Replacement - Vicious Vending ➜ Coin Cache [gamemasterplc] 2822a4c4 00000017 0222a4c4 0000000f diff --git a/Data/Sys/GameSettings/GP6E01.ini b/Data/Sys/GameSettings/GP6E01.ini index 36948b933552..47eb810071c8 100644 --- a/Data/Sys/GameSettings/GP6E01.ini +++ b/Data/Sys/GameSettings/GP6E01.ini @@ -466,6 +466,11 @@ e2000001 80008000 E2000001 80008000 *Automatically advance through the initial cutscences. +$QOL - Disable Advance on Results [gamemasterplc] +20503FB0 41820054 +04503FB0 48000054 +E2000001 80008000 + $QOL - Increased Board Speed [Celerizer] 041556d4 7d170e70 042c31b4 41f00000 diff --git a/Data/Sys/GameSettings/GP7E01.ini b/Data/Sys/GameSettings/GP7E01.ini index 2686dc68f97b..37ab3225a7de 100644 --- a/Data/Sys/GameSettings/GP7E01.ini +++ b/Data/Sys/GameSettings/GP7E01.ini @@ -597,6 +597,11 @@ e2000001 80008000 e2000001 80008000 *Automatically advance through the initial cutscences. +$QOL - Disable Advance on Results [gamemasterplc] +204DF310 41820050 +044DF310 48000050 +E2000001 80008000 + $QOL - Increased Board Speed [gamemasterplc] 04160ad8 38030002 041604b0 38c0000a @@ -1033,6 +1038,13 @@ c21d8ed0 00000007 041d8f68 3aa00028 *(X) -> 40 Coins, 10 Coins -> 40 Coins, Half Coins -> 40 Coins, 2 Stars -> 1 Star +$Mechanics - Improved Duel Results #2 [Airsola +C21D8ED0 00000003 +2C030003 40820008 +38600001 7C711B78 +60000000 00000000 +*(X) -> 1/2 Coins + $Mechanics - Last 5 Turns Event Is Always x3 Coins on Spaces [gamemasterplc] 042311A8 38000000 From 97ca7b56cbd54faa1037083ec3533a7b1d696418 Mon Sep 17 00:00:00 2001 From: Nora Date: Fri, 25 Feb 2022 20:51:42 -0500 Subject: [PATCH 067/237] Update GMPE01.ini --- Data/Sys/GameSettings/GMPE01.ini | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Data/Sys/GameSettings/GMPE01.ini b/Data/Sys/GameSettings/GMPE01.ini index 375891191f06..b77cbb6231e5 100644 --- a/Data/Sys/GameSettings/GMPE01.ini +++ b/Data/Sys/GameSettings/GMPE01.ini @@ -117,8 +117,6 @@ c2086774 00000002 c2073c7c 00000002 735a0020 281a0020 60000000 00000000 -04005E90 60000000 -04005EA8 60000000 *Fixes a bug where only analog inputs work while using the triggers. $QOL - Automatically Advance Text Boxes [gamemasterplc] @@ -450,4 +448,4 @@ e2000001 80008000 $Minigame Replacement - Three Throw ➜ Mr. 
Blizzard's Brigade [Nora] 2818fd2c 00000009 0218fd2c 0000000B -e2000001 80008000 \ No newline at end of file +e2000001 80008000 From 01bd5e15ba4484c6d49ae569dc620e42f4218838 Mon Sep 17 00:00:00 2001 From: Dentomologist Date: Fri, 25 Feb 2022 21:07:39 -0800 Subject: [PATCH 068/237] FileUtil: Remove redundant statement --- Source/Core/Common/FileUtil.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/Source/Core/Common/FileUtil.cpp b/Source/Core/Common/FileUtil.cpp index d84b725e70b9..8715c50a990a 100644 --- a/Source/Core/Common/FileUtil.cpp +++ b/Source/Core/Common/FileUtil.cpp @@ -859,7 +859,6 @@ std::string GetExePath() } #elif defined(__APPLE__) result = GetBundleDirectory(); - result = result.substr(0, result.find_last_of("Dolphin.app/Contents/MacOS") + 1); #else char dolphin_exe_path[PATH_MAX]; ssize_t len = ::readlink("/proc/self/exe", dolphin_exe_path, sizeof(dolphin_exe_path)); From 1fd9a1117ec37cad25aab8cd81c7fd45264495a2 Mon Sep 17 00:00:00 2001 From: "Admiral H. Curtiss" Date: Sat, 26 Feb 2022 23:06:33 +0100 Subject: [PATCH 069/237] Qt/GeneralPane: Don't trigger config change events when populating GUI. --- Source/Core/DolphinQt/CMakeLists.txt | 1 + Source/Core/DolphinQt/DolphinQt.vcxproj | 1 + .../Core/DolphinQt/QtUtils/SignalBlocking.h | 32 ++++++++++++++ .../Core/DolphinQt/Settings/GeneralPane.cpp | 42 ++++++++++--------- 4 files changed, 56 insertions(+), 20 deletions(-) create mode 100644 Source/Core/DolphinQt/QtUtils/SignalBlocking.h diff --git a/Source/Core/DolphinQt/CMakeLists.txt b/Source/Core/DolphinQt/CMakeLists.txt index 4ca4a216fbc7..fe7a42604e3c 100644 --- a/Source/Core/DolphinQt/CMakeLists.txt +++ b/Source/Core/DolphinQt/CMakeLists.txt @@ -279,6 +279,7 @@ add_executable(dolphin-emu QtUtils/PartiallyClosableTabWidget.h QtUtils/ImageConverter.cpp QtUtils/ImageConverter.h + QtUtils/SignalBlocking.h QtUtils/UTF8CodePointCountValidator.cpp QtUtils/UTF8CodePointCountValidator.h QtUtils/WindowActivationEventFilter.cpp diff --git a/Source/Core/DolphinQt/DolphinQt.vcxproj b/Source/Core/DolphinQt/DolphinQt.vcxproj index b5db932b0745..636cc514b66a 100644 --- a/Source/Core/DolphinQt/DolphinQt.vcxproj +++ b/Source/Core/DolphinQt/DolphinQt.vcxproj @@ -225,6 +225,7 @@ + diff --git a/Source/Core/DolphinQt/QtUtils/SignalBlocking.h b/Source/Core/DolphinQt/QtUtils/SignalBlocking.h new file mode 100644 index 000000000000..296ed69f2284 --- /dev/null +++ b/Source/Core/DolphinQt/QtUtils/SignalBlocking.h @@ -0,0 +1,32 @@ +// Copyright 2022 Dolphin Emulator Project +// SPDX-License-Identifier: GPL-2.0-or-later + +#pragma once + +#include + +// Helper class for populating a GUI element without triggering its data change signals. 
+ +template +class SignalBlockerProxy +{ +public: + explicit SignalBlockerProxy(T* object) : m_object(object), m_blocker(object) {} + SignalBlockerProxy(const SignalBlockerProxy& other) = delete; + SignalBlockerProxy(SignalBlockerProxy&& other) = default; + SignalBlockerProxy& operator=(const SignalBlockerProxy& other) = delete; + SignalBlockerProxy& operator=(SignalBlockerProxy&& other) = default; + ~SignalBlockerProxy() = default; + + T* operator->() const { return m_object; } + +private: + T* m_object; + QSignalBlocker m_blocker; +}; + +template +SignalBlockerProxy SignalBlocking(T* object) +{ + return SignalBlockerProxy(object); +} diff --git a/Source/Core/DolphinQt/Settings/GeneralPane.cpp b/Source/Core/DolphinQt/Settings/GeneralPane.cpp index e8ec3eeab046..f877e68a2715 100644 --- a/Source/Core/DolphinQt/Settings/GeneralPane.cpp +++ b/Source/Core/DolphinQt/Settings/GeneralPane.cpp @@ -23,6 +23,7 @@ #include "Core/PowerPC/PowerPC.h" #include "DolphinQt/QtUtils/ModalMessageBox.h" +#include "DolphinQt/QtUtils/SignalBlocking.h" #include "DolphinQt/Settings.h" #include "UICommon/AutoUpdate.h" @@ -238,45 +239,46 @@ void GeneralPane::LoadConfig() if (AutoUpdateChecker::SystemSupportsAutoUpdates()) { const auto track = Settings::Instance().GetAutoUpdateTrack().toStdString(); - if (track == AUTO_UPDATE_DISABLE_STRING) - m_combobox_update_track->setCurrentIndex(AUTO_UPDATE_DISABLE_INDEX); + SignalBlocking(m_combobox_update_track)->setCurrentIndex(AUTO_UPDATE_DISABLE_INDEX); else if (track == AUTO_UPDATE_STABLE_STRING) - m_combobox_update_track->setCurrentIndex(AUTO_UPDATE_STABLE_INDEX); + SignalBlocking(m_combobox_update_track)->setCurrentIndex(AUTO_UPDATE_STABLE_INDEX); else if (track == AUTO_UPDATE_BETA_STRING) - m_combobox_update_track->setCurrentIndex(AUTO_UPDATE_BETA_INDEX); + SignalBlocking(m_combobox_update_track)->setCurrentIndex(AUTO_UPDATE_BETA_INDEX); else - m_combobox_update_track->setCurrentIndex(AUTO_UPDATE_DEV_INDEX); + SignalBlocking(m_combobox_update_track)->setCurrentIndex(AUTO_UPDATE_DEV_INDEX); } #if defined(USE_ANALYTICS) && USE_ANALYTICS - m_checkbox_enable_analytics->setChecked(Settings::Instance().IsAnalyticsEnabled()); + SignalBlocking(m_checkbox_enable_analytics) + ->setChecked(Settings::Instance().IsAnalyticsEnabled()); #endif - m_checkbox_dualcore->setChecked(Config::Get(Config::MAIN_CPU_THREAD)); - m_checkbox_cheats->setChecked(Settings::Instance().GetCheatsEnabled()); - m_checkbox_override_region_settings->setChecked( - Config::Get(Config::MAIN_OVERRIDE_REGION_SETTINGS)); - m_checkbox_auto_disc_change->setChecked(Config::Get(Config::MAIN_AUTO_DISC_CHANGE)); + SignalBlocking(m_checkbox_dualcore)->setChecked(Config::Get(Config::MAIN_CPU_THREAD)); + SignalBlocking(m_checkbox_cheats)->setChecked(Settings::Instance().GetCheatsEnabled()); + SignalBlocking(m_checkbox_override_region_settings) + ->setChecked(Config::Get(Config::MAIN_OVERRIDE_REGION_SETTINGS)); + SignalBlocking(m_checkbox_auto_disc_change) + ->setChecked(Config::Get(Config::MAIN_AUTO_DISC_CHANGE)); + #ifdef USE_DISCORD_PRESENCE - m_checkbox_discord_presence->setChecked(Config::Get(Config::MAIN_USE_DISCORD_PRESENCE)); + SignalBlocking(m_checkbox_discord_presence) + ->setChecked(Config::Get(Config::MAIN_USE_DISCORD_PRESENCE)); #endif int selection = qRound(Config::Get(Config::MAIN_EMULATION_SPEED) * 10); if (selection < m_combobox_speedlimit->count()) - m_combobox_speedlimit->setCurrentIndex(selection); - m_checkbox_dualcore->setChecked(Config::Get(Config::MAIN_CPU_THREAD)); + 
SignalBlocking(m_combobox_speedlimit)->setCurrentIndex(selection); const auto fallback = Settings::Instance().GetFallbackRegion(); - if (fallback == DiscIO::Region::NTSC_J) - m_combobox_fallback_region->setCurrentIndex(FALLBACK_REGION_NTSCJ_INDEX); + SignalBlocking(m_combobox_fallback_region)->setCurrentIndex(FALLBACK_REGION_NTSCJ_INDEX); else if (fallback == DiscIO::Region::NTSC_U) - m_combobox_fallback_region->setCurrentIndex(FALLBACK_REGION_NTSCU_INDEX); + SignalBlocking(m_combobox_fallback_region)->setCurrentIndex(FALLBACK_REGION_NTSCU_INDEX); else if (fallback == DiscIO::Region::PAL) - m_combobox_fallback_region->setCurrentIndex(FALLBACK_REGION_PAL_INDEX); + SignalBlocking(m_combobox_fallback_region)->setCurrentIndex(FALLBACK_REGION_PAL_INDEX); else if (fallback == DiscIO::Region::NTSC_K) - m_combobox_fallback_region->setCurrentIndex(FALLBACK_REGION_NTSCK_INDEX); + SignalBlocking(m_combobox_fallback_region)->setCurrentIndex(FALLBACK_REGION_NTSCK_INDEX); else - m_combobox_fallback_region->setCurrentIndex(FALLBACK_REGION_NTSCJ_INDEX); + SignalBlocking(m_combobox_fallback_region)->setCurrentIndex(FALLBACK_REGION_NTSCJ_INDEX); } static QString UpdateTrackFromIndex(int index) From c95c43bfb6f28861380de41b9c1e7f45ae6243e4 Mon Sep 17 00:00:00 2001 From: Tony Gong Date: Sun, 27 Feb 2022 09:58:21 -0800 Subject: [PATCH 070/237] Make pos/neg analog axes symmetrical Currently, the axes for the main and C sticks range from 0-255, with 128 being the mid-point; but this isn't symmetrical: the negative axis has 128 values not including 0, while the positive axis has 127 values not including 0. Normalizing so that the range is 1-255 makes the positive and negative axes symmetrical. The inability to yield 0 shouldn't be an issue as a real GC controller cannot yield it anyway. --- Source/Core/Core/HW/GCPadEmu.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Source/Core/Core/HW/GCPadEmu.cpp b/Source/Core/Core/HW/GCPadEmu.cpp index 13fb79e28a13..e387d4765aac 100644 --- a/Source/Core/Core/HW/GCPadEmu.cpp +++ b/Source/Core/Core/HW/GCPadEmu.cpp @@ -158,12 +158,12 @@ GCPadStatus GCPad::GetInput() const // sticks const auto main_stick_state = m_main_stick->GetState(); - pad.stickX = MapFloat(main_stick_state.x, GCPadStatus::MAIN_STICK_CENTER_X); - pad.stickY = MapFloat(main_stick_state.y, GCPadStatus::MAIN_STICK_CENTER_Y); + pad.stickX = MapFloat(main_stick_state.x, GCPadStatus::MAIN_STICK_CENTER_X, 1); + pad.stickY = MapFloat(main_stick_state.y, GCPadStatus::MAIN_STICK_CENTER_Y, 1); const auto c_stick_state = m_c_stick->GetState(); - pad.substickX = MapFloat(c_stick_state.x, GCPadStatus::C_STICK_CENTER_X); - pad.substickY = MapFloat(c_stick_state.y, GCPadStatus::C_STICK_CENTER_Y); + pad.substickX = MapFloat(c_stick_state.x, GCPadStatus::C_STICK_CENTER_X, 1); + pad.substickY = MapFloat(c_stick_state.y, GCPadStatus::C_STICK_CENTER_Y, 1); // triggers std::array triggers; From 8588272b3bf2bcc1689e331873b5c0e0876b5a57 Mon Sep 17 00:00:00 2001 From: JosJuice Date: Sun, 27 Feb 2022 22:45:21 +0100 Subject: [PATCH 071/237] Android: Set letterSpacing for savestate options fbe7cf6 set this for most buttons in the in-game menu but missed the savestate-related buttons and Unpause Emulation. 
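As a rough illustration of the symmetric axis mapping reasoned through in the GCPadEmu change above: with an 8-bit axis centered at 128, a lower bound of 0 gives the negative side 128 steps but the positive side only 127, while a lower bound of 1 leaves 127 steps on each side. The sketch below is a hypothetical standalone helper written for illustration only; it is not Dolphin's actual MapFloat implementation.

#include <algorithm>
#include <cmath>
#include <cstdint>

// Maps a stick value in [-1.0, 1.0] onto an 8-bit axis centered on `center`.
// Using min = 1 instead of 0 leaves 127 steps on each side of the center,
// so full negative and full positive deflection sit equally far from rest.
static uint8_t MapAxisSymmetric(double input, uint8_t center = 128, uint8_t min = 1,
                                uint8_t max = 255)
{
  const double scaled =
      input >= 0 ? input * (max - center) : input * (center - min);
  const double value = std::round(static_cast<double>(center) + scaled);
  return static_cast<uint8_t>(
      std::clamp(value, static_cast<double>(min), static_cast<double>(max)));
}

// Example: MapAxisSymmetric(-1.0) == 1 and MapAxisSymmetric(1.0) == 255,
// both exactly 127 steps away from the 128 rest position.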
--- .../src/main/res/layout/fragment_ingame_menu.xml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/Source/Android/app/src/main/res/layout/fragment_ingame_menu.xml b/Source/Android/app/src/main/res/layout/fragment_ingame_menu.xml index 04e996bfda79..71dd4b6ad8de 100644 --- a/Source/Android/app/src/main/res/layout/fragment_ingame_menu.xml +++ b/Source/Android/app/src/main/res/layout/fragment_ingame_menu.xml @@ -43,8 +43,9 @@